[ 521.788035] env[61649]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=61649) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 521.788382] env[61649]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=61649) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 521.788432] env[61649]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=61649) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 521.788746] env[61649]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 521.877019] env[61649]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=61649) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:390}}
[ 521.885726] env[61649]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.009s {{(pid=61649) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:428}}
[ 521.929557] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-e02aedd3-ffe2-4313-968b-b83b513e4ae1 None None] Creating reply queue: reply_17c3d98394d943e0a538ced2a50ef815
[ 521.937555] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-e02aedd3-ffe2-4313-968b-b83b513e4ae1 None None] Expecting reply to msg 90275df5db3c4cffbcc1ce6f98ee933d in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 521.950976] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 90275df5db3c4cffbcc1ce6f98ee933d
[ 522.019527] env[61649]: INFO nova.virt.driver [None req-e02aedd3-ffe2-4313-968b-b83b513e4ae1 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 522.090462] env[61649]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 522.090611] env[61649]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.001s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 522.091004] env[61649]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=61649) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 525.126405] env[61649]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-9b5505fa-3a61-48cb-82a0-ebe03c9ca324 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 525.142272] env[61649]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=61649) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 525.142447] env[61649]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-d23dcca5-6946-4626-9c24-9090548d4938 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 525.167791] env[61649]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 65ccc.
[ 525.167943] env[61649]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.077s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 525.168501] env[61649]: INFO nova.virt.vmwareapi.driver [None req-e02aedd3-ffe2-4313-968b-b83b513e4ae1 None None] VMware vCenter version: 7.0.3
[ 525.171875] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ccebfca-8b33-4b96-975f-db03236a5f59 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 525.188741] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cb203e1-22e2-4a34-8dd8-da0629cac17d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 525.194843] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be152370-742c-4bab-9434-03dd5ea9fe9e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 525.201307] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4199f10c-7650-43c9-b267-b2ed64747045 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 525.213964] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16c98679-4e16-41f8-87f6-54d944b7939e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 525.219794] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66ad002a-08a6-436d-b20a-e6b536da6cdd {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 525.249705] env[61649]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-7dbaa7dc-489d-49ea-9fa9-4328ff074030 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 525.254425] env[61649]: DEBUG nova.virt.vmwareapi.driver [None req-e02aedd3-ffe2-4313-968b-b83b513e4ae1 None None] Extension org.openstack.compute already exists. {{(pid=61649) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:224}}
[ 525.257048] env[61649]: INFO nova.compute.provider_config [None req-e02aedd3-ffe2-4313-968b-b83b513e4ae1 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 525.257688] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-e02aedd3-ffe2-4313-968b-b83b513e4ae1 None None] Expecting reply to msg 4c0372df08254b4fae036798b908807f in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 525.274178] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4c0372df08254b4fae036798b908807f
[ 525.275021] env[61649]: DEBUG nova.context [None req-e02aedd3-ffe2-4313-968b-b83b513e4ae1 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),0782fa56-eb96-447a-8437-ec43190c8b2e(cell1) {{(pid=61649) load_cells /opt/stack/nova/nova/context.py:464}}
[ 525.277037] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 525.277274] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 525.277928] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 525.278352] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] Acquiring lock "0782fa56-eb96-447a-8437-ec43190c8b2e" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 525.278664] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] Lock "0782fa56-eb96-447a-8437-ec43190c8b2e" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 525.279966] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] Lock "0782fa56-eb96-447a-8437-ec43190c8b2e" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 525.299774] env[61649]: INFO dbcounter [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] Registered counter for database nova_cell0
[ 525.308048] env[61649]: INFO dbcounter [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] Registered counter for database nova_cell1
[ 525.311235] env[61649]: DEBUG oslo_db.sqlalchemy.engines [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=61649) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 525.311669] env[61649]: DEBUG oslo_db.sqlalchemy.engines [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=61649) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 525.315721] env[61649]: DEBUG dbcounter [-] [61649] Writer thread running {{(pid=61649) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 525.317014] env[61649]: DEBUG dbcounter [-] [61649] Writer thread running {{(pid=61649) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 525.318627] env[61649]: ERROR nova.db.main.api [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main
[ 525.318627] env[61649]: result = function(*args, **kwargs)
[ 525.318627] env[61649]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 525.318627] env[61649]: return func(*args, **kwargs)
[ 525.318627] env[61649]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 525.318627] env[61649]: result = fn(*args, **kwargs)
[ 525.318627] env[61649]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 525.318627] env[61649]: return f(*args, **kwargs)
[ 525.318627] env[61649]: File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 525.318627] env[61649]: return db.service_get_minimum_version(context, binaries)
[ 525.318627] env[61649]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 525.318627] env[61649]: _check_db_access()
[ 525.318627] env[61649]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 525.318627] env[61649]: stacktrace = ''.join(traceback.format_stack())
[ 525.318627] env[61649]:
[ 525.319905] env[61649]: ERROR nova.db.main.api [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main
[ 525.319905] env[61649]: result = function(*args, **kwargs)
[ 525.319905] env[61649]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 525.319905] env[61649]: return func(*args, **kwargs)
[ 525.319905] env[61649]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 525.319905] env[61649]: result = fn(*args, **kwargs)
[ 525.319905] env[61649]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 525.319905] env[61649]: return f(*args, **kwargs)
[ 525.319905] env[61649]: File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 525.319905] env[61649]: return db.service_get_minimum_version(context, binaries)
[ 525.319905] env[61649]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 525.319905] env[61649]: _check_db_access()
[ 525.319905] env[61649]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 525.319905] env[61649]: stacktrace = ''.join(traceback.format_stack())
[ 525.319905] env[61649]:
[ 525.320331] env[61649]: WARNING nova.objects.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 525.320478] env[61649]: WARNING nova.objects.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] Failed to get minimum service version for cell 0782fa56-eb96-447a-8437-ec43190c8b2e
[ 525.320910] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] Acquiring lock "singleton_lock" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 525.321069] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] Acquired lock "singleton_lock" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 525.321322] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] Releasing lock "singleton_lock" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 525.321649] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] Full set of CONF: {{(pid=61649) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}}
[ 525.321792] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] ******************************************************************************** {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2600}}
[ 525.321923] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] Configuration options gathered from: {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2601}}
[ 525.322083] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 525.322244] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2603}}
[ 525.322373] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] ================================================================================ {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2605}}
[ 525.322582] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] allow_resize_to_same_host = True {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.322754] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] arq_binding_timeout = 300 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.322888] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] backdoor_port = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.323016] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] backdoor_socket = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.323185] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] block_device_allocate_retries = 60 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.323349] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] block_device_allocate_retries_interval = 3 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.323519] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cert = self.pem {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.323686] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.323858] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] compute_monitors = [] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.324038] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] config_dir = [] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.324220] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] config_drive_format = iso9660 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.324360] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.324531] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] config_source = [] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.324700] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] console_host = devstack {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.324870] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] control_exchange = nova {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.325035] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cpu_allocation_ratio = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.325206] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] daemon = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.325376] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] debug = True {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.325539] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] default_access_ip_network_name = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.325710] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] default_availability_zone = nova {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.325871] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] default_ephemeral_format = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.326083] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] default_green_pool_size = 1000 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.326263] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.326431] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] default_schedule_zone = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.326592] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] disk_allocation_ratio = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.326755] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] enable_new_services = True {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.326962] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] enabled_apis = ['osapi_compute'] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.327149] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] enabled_ssl_apis = [] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.327321] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] flat_injected = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.327479] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] force_config_drive = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.327639] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] force_raw_images = True {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.327809] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] graceful_shutdown_timeout = 5 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.327971] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] heal_instance_info_cache_interval = 60 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.328262] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] host = cpu-1 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.328448] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.328619] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] initial_disk_allocation_ratio = 1.0 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.328782] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] initial_ram_allocation_ratio = 1.0 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.329010] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.329189] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] instance_build_timeout = 0 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.329354] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] instance_delete_interval = 300 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.329525] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] instance_format = [instance: %(uuid)s] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.329695] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] instance_name_template = instance-%08x {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.329859] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] instance_usage_audit = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.330034] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] instance_usage_audit_period = month {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.330206] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.330374] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] instances_path = /opt/stack/data/nova/instances {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.330548] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] internal_service_availability_zone = internal {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.330711] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] key = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.330876] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] live_migration_retry_count = 30 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.331043] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] log_config_append = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.331215] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.331378] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] log_dir = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.331545] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] log_file = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.331678] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] log_options = True {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.331845] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] log_rotate_interval = 1 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.332032] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] log_rotate_interval_type = days {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.332213] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] log_rotation_type = none {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.332348] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.332478] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.332650] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.332819] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.332951] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.333116] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] long_rpc_timeout = 1800 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.333282] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] max_concurrent_builds = 10 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.333442] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] max_concurrent_live_migrations = 1 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.333604] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] max_concurrent_snapshots = 5 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.333767] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] max_local_block_devices = 3 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.333928] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] max_logfile_count = 30 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.334091] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] max_logfile_size_mb = 200 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.334255] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] maximum_instance_delete_attempts = 5 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.334425] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] metadata_listen = 0.0.0.0 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.334596] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] metadata_listen_port = 8775 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.334766] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] metadata_workers = 2 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.334928] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] migrate_max_retries = -1 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.335096] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] mkisofs_cmd = genisoimage {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.335308] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] my_block_storage_ip = 10.180.1.21 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.335446] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] my_ip = 10.180.1.21 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.335615] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] network_allocate_retries = 0 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.335794] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.335965] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] osapi_compute_listen = 0.0.0.0 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.336150] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] osapi_compute_listen_port = 8774 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.336326] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] osapi_compute_unique_server_name_scope = {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.336498] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] osapi_compute_workers = 2 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.336664] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] password_length = 12 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.336831] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] periodic_enable = True {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.337027] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] periodic_fuzzy_delay = 60 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.337205] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] pointer_model = usbtablet {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.337378] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] preallocate_images = none {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.337544] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] publish_errors = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.337679] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] pybasedir = /opt/stack/nova {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.337839] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] ram_allocation_ratio = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.338005] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] rate_limit_burst = 0 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.338179] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] rate_limit_except_level = CRITICAL {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.338341] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] rate_limit_interval = 0 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.338503] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] reboot_timeout = 0 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.338665] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] reclaim_instance_interval = 0 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.338827] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] record = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.339021] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] reimage_timeout_per_gb = 60 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.339197] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] report_interval = 120 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.339364] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] rescue_timeout = 0 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.339527] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] reserved_host_cpus = 0 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.339691] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] reserved_host_disk_mb = 0 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.339854] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] reserved_host_memory_mb = 512 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.340060] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] reserved_huge_pages = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.340245] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] resize_confirm_window = 0 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.340414] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] resize_fs_using_block_device = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.340578] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] resume_guests_state_on_host_boot = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.340750] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.340915] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] rpc_response_timeout = 60 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.341081] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] run_external_periodic_tasks = True {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.341255] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] running_deleted_instance_action = reap {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.341421] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] running_deleted_instance_poll_interval = 1800 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.341582] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] running_deleted_instance_timeout = 0 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.341742] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] scheduler_instance_sync_interval = 120 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.341909] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] service_down_time = 720 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.342079] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] servicegroup_driver = db {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.342297] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] shelved_offload_time = 0 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.342400] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] shelved_poll_interval = 3600 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.342568] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] shutdown_timeout = 0 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.342732] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] source_is_ipv6 = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.342891] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] ssl_only = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.343141] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.343313] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] sync_power_state_interval = 600 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.343479] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] sync_power_state_pool_size = 1000 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.343649] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] syslog_log_facility = LOG_USER {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.343808] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] tempdir = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.343973] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] timeout_nbd = 10 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.344204] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] transport_url = **** {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.344337] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] update_resources_interval = 0 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.344500] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] use_cow_images = True {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.344660] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] use_eventlog = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.344821] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] use_journal = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.344982] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] use_json = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.345143] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] use_rootwrap_daemon = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.345306] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] use_stderr = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.345465] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] use_syslog = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.345625] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vcpu_pin_set = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.345796] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vif_plugging_is_fatal = True {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.345965] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vif_plugging_timeout = 300 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.346136] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] virt_mkfs = [] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.346301] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] volume_usage_poll_interval = 0 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.346466] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] watch_log_file = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.346642] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] web = /usr/share/spice-html5 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 525.346827] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_concurrency.disable_process_locking = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.347192] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.347389] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.347563] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.347741] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.347913] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.348097] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.348289] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] api.auth_strategy = keystone {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.348460] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] api.compute_link_prefix = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.348641] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.348816] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] api.dhcp_domain = novalocal {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.349001] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] api.enable_instance_password = True {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.349184] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] api.glance_link_prefix = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.349359] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.349533] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.349701] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] api.instance_list_per_project_cells = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.349869] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] api.list_records_by_skipping_down_cells = True {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.350037] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] api.local_metadata_per_cell = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.350208] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] api.max_limit = 1000 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.350380] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] api.metadata_cache_expiration = 15 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.350558] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] api.neutron_default_tenant_id = default {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.350731] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] api.use_neutron_default_nets = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.350898] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.351064] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.351236] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.351410] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.351583] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] api.vendordata_dynamic_targets = [] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.351753] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] api.vendordata_jsonfile_path = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.351937] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.352154] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cache.backend = dogpile.cache.memcached {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.352402] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cache.backend_argument = **** {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.352518] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cache.config_prefix = cache.oslo {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.352693] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cache.dead_timeout = 60.0 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.352861] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cache.debug_cache_backend = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.353027] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cache.enable_retry_client = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.353192] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cache.enable_socket_keepalive = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.353363] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cache.enabled = True {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.353528] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cache.enforce_fips_mode = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.353697] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cache.expiration_time = 600 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.353865] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cache.hashclient_retry_attempts = 2 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.354037] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cache.hashclient_retry_delay = 1.0 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.354205] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cache.memcache_dead_retry = 300 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.354368] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cache.memcache_password = **** {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.354537] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.354701] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.354866] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cache.memcache_pool_maxsize = 10 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.355030] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.355196] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cache.memcache_sasl_enabled = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.355377] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.355547] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cache.memcache_socket_timeout = 1.0 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.355712] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cache.memcache_username = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.355881] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cache.proxies = [] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.356083] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cache.redis_password = **** {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.356245] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cache.redis_sentinel_service_name = mymaster {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.356429] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.356603] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cache.redis_server = localhost:6379 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.356775] env[61649]: DEBUG oslo_service.service [None
req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cache.redis_socket_timeout = 1.0 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.356960] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cache.redis_username = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.357141] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cache.retry_attempts = 2 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.357314] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cache.retry_delay = 0.0 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.357481] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cache.socket_keepalive_count = 1 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.357646] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cache.socket_keepalive_idle = 1 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.357812] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cache.socket_keepalive_interval = 1 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.357976] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cache.tls_allowed_ciphers = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.358140] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cache.tls_cafile = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.358301] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cache.tls_certfile = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.358470] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cache.tls_enabled = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.358629] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cache.tls_keyfile = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.358803] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cinder.auth_section = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.359002] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cinder.auth_type = password {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.359175] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cinder.cafile = None {{(pid=61649) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.359362] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cinder.catalog_info = volumev3::publicURL {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.359529] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cinder.certfile = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.359702] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cinder.collect_timing = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.359869] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cinder.cross_az_attach = True {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.360059] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cinder.debug = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.360234] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cinder.endpoint_template = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.360404] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cinder.http_retries = 3 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.360571] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cinder.insecure = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.360734] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cinder.keyfile = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.360908] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cinder.os_region_name = RegionOne {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.361077] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cinder.split_loggers = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.361240] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cinder.timeout = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.361415] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.361579] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] compute.cpu_dedicated_set = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.361739] env[61649]: DEBUG 
oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] compute.cpu_shared_set = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.361907] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] compute.image_type_exclude_list = [] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.362075] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.362247] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] compute.max_concurrent_disk_ops = 0 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.362412] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] compute.max_disk_devices_to_attach = -1 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.362578] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.362753] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.362919] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] compute.resource_provider_association_refresh = 300 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.363085] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] compute.shutdown_retry_interval = 10 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.363268] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.363450] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] conductor.workers = 2 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.363632] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] console.allowed_origins = [] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.363797] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] console.ssl_ciphers = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.363971] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] console.ssl_minimum_version = default {{(pid=61649) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.364165] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] consoleauth.enforce_session_timeout = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.364341] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] consoleauth.token_ttl = 600 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.364519] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cyborg.cafile = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.364681] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cyborg.certfile = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.364852] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cyborg.collect_timing = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.365017] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cyborg.connect_retries = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.365181] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cyborg.connect_retry_delay = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.365344] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cyborg.endpoint_override = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.365512] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cyborg.insecure = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.365677] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cyborg.keyfile = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.365843] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cyborg.max_version = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.366005] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cyborg.min_version = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.366169] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cyborg.region_name = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.366329] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cyborg.retriable_status_codes = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.366488] env[61649]: DEBUG 
oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cyborg.service_name = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.366660] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cyborg.service_type = accelerator {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.366826] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cyborg.split_loggers = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.367014] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cyborg.status_code_retries = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.367184] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cyborg.status_code_retry_delay = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.367346] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cyborg.timeout = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.367532] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.367695] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] cyborg.version = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.367880] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] database.backend = sqlalchemy {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.368092] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] database.connection = **** {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.368241] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] database.connection_debug = 0 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.368415] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] database.connection_parameters = {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.368583] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] database.connection_recycle_time = 3600 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.368750] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] database.connection_trace = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.368915] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] 
database.db_inc_retry_interval = True {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.369117] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] database.db_max_retries = 20 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.369295] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] database.db_max_retry_interval = 10 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.369464] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] database.db_retry_interval = 1 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.369633] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] database.max_overflow = 50 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.369799] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] database.max_pool_size = 5 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.369970] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] database.max_retries = 10 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.370155] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.370335] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] database.mysql_wsrep_sync_wait = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.370497] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] database.pool_timeout = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.370662] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] database.retry_interval = 10 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.370824] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] database.slave_connection = **** {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.371359] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] database.sqlite_synchronous = True {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.371359] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] database.use_db_reconnect = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.371359] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] api_database.backend = sqlalchemy {{(pid=61649) log_opt_values 
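(The DEBUG lines above and below all cite log_opt_values in oslo_config/cfg.py: at service start, oslo.config walks every registered option group and emits one "group.option = value" line per option, printing options registered as secret as **** — hence cache.memcache_password, database.connection, and the other masked values here. A minimal standalone sketch of that call, assuming stock oslo.config and reusing two cache options from this dump:

import logging

from oslo_config import cfg

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger(__name__)

CONF = cfg.ConfigOpts()
CONF.register_opts(
    [
        cfg.ListOpt('memcache_servers', default=['localhost:11211']),
        cfg.StrOpt('memcache_password', secret=True),  # dumped as ****
    ],
    group='cache',
)
CONF([])  # parse an empty command line so option values can be read
CONF.log_opt_values(LOG, logging.DEBUG)  # emits cache.* = ... lines like those in this log
)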
[ 525.371564] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] api_database.connection = **** {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.371661] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] api_database.connection_debug = 0 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.371808] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] api_database.connection_parameters = {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.371980] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] api_database.connection_recycle_time = 3600 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.373419] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] api_database.connection_trace = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.373419] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] api_database.db_inc_retry_interval = True {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.373419] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] api_database.db_max_retries = 20 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.373419] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] api_database.db_max_retry_interval = 10 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.373419] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] api_database.db_retry_interval = 1 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.373419] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] api_database.max_overflow = 50 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.373724] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] api_database.max_pool_size = 5 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.373724] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] api_database.max_retries = 10 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.373724] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.373724] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.373932] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] api_database.pool_timeout = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.373932] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] api_database.retry_interval = 10 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.374093] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] api_database.slave_connection = **** {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.374258] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] api_database.sqlite_synchronous = True {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.374434] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] devices.enabled_mdev_types = [] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.374614] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.374790] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] ephemeral_storage_encryption.default_format = luks {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.374958] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] ephemeral_storage_encryption.enabled = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.375123] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.375298] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] glance.api_servers = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.375465] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] glance.cafile = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.375631] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] glance.certfile = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.375798] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] glance.collect_timing = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.375959] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] glance.connect_retries = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.376135] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] glance.connect_retry_delay = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.376304] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] glance.debug = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.376506] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] glance.default_trusted_certificate_ids = [] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.376641] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] glance.enable_certificate_validation = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.376807] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] glance.enable_rbd_download = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.376993] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] glance.endpoint_override = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.377175] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] glance.insecure = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.377342] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] glance.keyfile = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.377506] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] glance.max_version = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.377668] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] glance.min_version = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.377834] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] glance.num_retries = 3 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.378006] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] glance.rbd_ceph_conf = {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.378174] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] glance.rbd_connect_timeout = 5 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.378347] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] glance.rbd_pool = {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.378517] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] glance.rbd_user = {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.378680] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] glance.region_name = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.378843] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] glance.retriable_status_codes = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.379040] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] glance.service_name = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.379226] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] glance.service_type = image {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.379397] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] glance.split_loggers = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.379561] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] glance.status_code_retries = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.379723] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] glance.status_code_retry_delay = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.379886] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] glance.timeout = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.380096] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.380287] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] glance.verify_glance_signatures = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.380454] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] glance.version = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.380624] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] guestfs.debug = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.380793] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] mks.enabled = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.381161] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.381358] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] image_cache.manager_interval = 2400 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.381532] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] image_cache.precache_concurrency = 1 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.381707] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] image_cache.remove_unused_base_images = True {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.381879] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.382050] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.382230] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] image_cache.subdirectory_name = _base {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.382409] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] ironic.api_max_retries = 60 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.382577] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] ironic.api_retry_interval = 2 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.382743] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] ironic.auth_section = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.382907] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] ironic.auth_type = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.383070] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] ironic.cafile = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.383230] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] ironic.certfile = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.383397] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] ironic.collect_timing = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.383561] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] ironic.conductor_group = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.383723] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] ironic.connect_retries = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.383883] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] ironic.connect_retry_delay = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.384058] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] ironic.endpoint_override = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.384234] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] ironic.insecure = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.384395] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] ironic.keyfile = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.384558] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] ironic.max_version = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.384718] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] ironic.min_version = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.384885] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] ironic.peer_list = [] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.385047] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] ironic.region_name = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.385209] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] ironic.retriable_status_codes = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.385375] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] ironic.serial_console_state_timeout = 10 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.385536] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] ironic.service_name = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.385708] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] ironic.service_type = baremetal {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.385872] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] ironic.shard = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.386038] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] ironic.split_loggers = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.386201] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] ironic.status_code_retries = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.386364] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] ironic.status_code_retry_delay = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.386527] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] ironic.timeout = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.386709] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.386882] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] ironic.version = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.387087] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.387268] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] key_manager.fixed_key = **** {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.387455] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.387620] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] barbican.barbican_api_version = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.387784] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] barbican.barbican_endpoint = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.387958] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] barbican.barbican_endpoint_type = public {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.388136] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] barbican.barbican_region_name = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.388302] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] barbican.cafile = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.388465] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] barbican.certfile = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.388630] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] barbican.collect_timing = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.388794] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] barbican.insecure = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.388982] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] barbican.keyfile = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.389183] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] barbican.number_of_retries = 60 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.389359] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] barbican.retry_delay = 1 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.389527] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] barbican.send_service_user_token = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.389693] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] barbican.split_loggers = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.389855] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] barbican.timeout = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.390021] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] barbican.verify_ssl = True {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.390182] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] barbican.verify_ssl_path = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.390349] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] barbican_service_user.auth_section = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.390516] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] barbican_service_user.auth_type = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.390677] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] barbican_service_user.cafile = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.390836] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] barbican_service_user.certfile = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.391002] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] barbican_service_user.collect_timing = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.391167] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] barbican_service_user.insecure = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.391329] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] barbican_service_user.keyfile = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.391494] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] barbican_service_user.split_loggers = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.391655] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] barbican_service_user.timeout = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.391826] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vault.approle_role_id = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.391991] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vault.approle_secret_id = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.392175] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vault.cafile = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.392339] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vault.certfile = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.392505] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vault.collect_timing = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.392667] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vault.insecure = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.392825] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vault.keyfile = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.393001] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vault.kv_mountpoint = secret {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.393162] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vault.kv_path = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.393329] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vault.kv_version = 2 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.393491] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vault.namespace = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.393708] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vault.root_token_id = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.393938] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vault.split_loggers = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.394118] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vault.ssl_ca_crt_file = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.394286] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vault.timeout = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.394456] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vault.use_ssl = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.394629] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.394804] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] keystone.auth_section = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.394972] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] keystone.auth_type = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.395137] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] keystone.cafile = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.395301] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] keystone.certfile = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.395469] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] keystone.collect_timing = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.395634] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] keystone.connect_retries = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.395796] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] keystone.connect_retry_delay = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.395957] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] keystone.endpoint_override = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.396139] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] keystone.insecure = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.396306] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] keystone.keyfile = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.396469] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] keystone.max_version = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.396630] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] keystone.min_version = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.396790] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] keystone.region_name = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.396967] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] keystone.retriable_status_codes = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.397140] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] keystone.service_name = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.397313] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] keystone.service_type = identity {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.397476] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] keystone.split_loggers = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.397638] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] keystone.status_code_retries = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.397801] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] keystone.status_code_retry_delay = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.397962] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] keystone.timeout = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 525.398145] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] keystone.valid_interfaces = ['internal',
'public'] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.398310] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] keystone.version = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.398513] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.connection_uri = {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.398678] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.cpu_mode = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.398849] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.cpu_model_extra_flags = [] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.399055] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.cpu_models = [] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.399241] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.cpu_power_governor_high = performance {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.399415] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.cpu_power_governor_low = powersave {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.399581] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.cpu_power_management = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.399755] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.399922] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.device_detach_attempts = 8 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.400105] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.device_detach_timeout = 20 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.400278] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.disk_cachemodes = [] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.400441] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.disk_prefix = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.400608] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.enabled_perf_events = [] {{(pid=61649) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.400775] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.file_backed_memory = 0 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.400940] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.gid_maps = [] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.401101] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.hw_disk_discard = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.401260] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.hw_machine_type = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.401452] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.images_rbd_ceph_conf = {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.401623] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.401787] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.401957] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.images_rbd_glance_store_name = {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.402130] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.images_rbd_pool = rbd {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.402297] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.images_type = default {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.402454] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.images_volume_group = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.402616] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.inject_key = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.402779] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.inject_partition = -2 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.402944] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.inject_password = False {{(pid=61649) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.403109] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.iscsi_iface = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.403275] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.iser_use_multipath = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.403444] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.live_migration_bandwidth = 0 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.403604] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.403769] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.live_migration_downtime = 500 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.403932] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.404109] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.404277] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.live_migration_inbound_addr = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.404446] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.404611] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.live_migration_permit_post_copy = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.404775] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.live_migration_scheme = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.404948] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.live_migration_timeout_action = abort {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.405118] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.live_migration_tunnelled = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.405276] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] 
libvirt.live_migration_uri = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.405437] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.live_migration_with_native_tls = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.405597] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.max_queues = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.405761] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.405999] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.406165] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.nfs_mount_options = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.406480] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.406659] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.406826] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.num_iser_scan_tries = 5 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.406998] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.num_memory_encrypted_guests = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.407167] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.407332] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.num_pcie_ports = 0 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.407499] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.num_volume_scan_tries = 5 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.407665] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.pmem_namespaces = [] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.407824] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] 
libvirt.quobyte_client_cfg = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.408130] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.408308] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.rbd_connect_timeout = 5 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.408473] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.408636] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.408798] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.rbd_secret_uuid = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.408956] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.rbd_user = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.409150] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.409329] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.remote_filesystem_transport = ssh {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.409520] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.rescue_image_id = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.409701] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.rescue_kernel_id = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.409864] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.rescue_ramdisk_id = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.410038] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.410235] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.rx_queue_size = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.410409] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] 
libvirt.smbfs_mount_options = {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.410687] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.410861] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.snapshot_compression = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.411024] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.snapshot_image_format = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.411243] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.411410] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.sparse_logical_volumes = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.411575] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.swtpm_enabled = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.411746] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.swtpm_group = tss {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.411915] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.swtpm_user = tss {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.412097] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.sysinfo_serial = unique {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.412264] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.tb_cache_size = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.412422] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.tx_queue_size = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.412584] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.uid_maps = [] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.412747] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.use_virtio_for_bridges = True {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.412916] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.virt_type = kvm 
{{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.413087] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.volume_clear = zero {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.413251] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.volume_clear_size = 0 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.413417] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.volume_use_multipath = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.413576] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.vzstorage_cache_path = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.413744] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.413912] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.vzstorage_mount_group = qemu {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.414077] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.vzstorage_mount_opts = [] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.414246] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.414580] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.414770] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.vzstorage_mount_user = stack {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.414941] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.415120] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] neutron.auth_section = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.415300] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] neutron.auth_type = password {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.415465] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] neutron.cafile = 
None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.415628] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] neutron.certfile = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.415792] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] neutron.collect_timing = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.415955] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] neutron.connect_retries = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.416133] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] neutron.connect_retry_delay = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.416312] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] neutron.default_floating_pool = public {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.416474] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] neutron.endpoint_override = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.416639] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] neutron.extension_sync_interval = 600 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.416803] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] neutron.http_retries = 3 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.416968] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] neutron.insecure = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.417128] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] neutron.keyfile = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.417289] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] neutron.max_version = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.417458] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.417619] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] neutron.min_version = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.417788] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] neutron.ovs_bridge = br-int {{(pid=61649) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.417954] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] neutron.physnets = [] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.418121] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] neutron.region_name = RegionOne {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.418287] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] neutron.retriable_status_codes = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.418455] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] neutron.service_metadata_proxy = True {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.418615] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] neutron.service_name = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.418782] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] neutron.service_type = network {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.418945] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] neutron.split_loggers = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.419135] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] neutron.status_code_retries = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.419303] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] neutron.status_code_retry_delay = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.419465] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] neutron.timeout = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.419646] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.419809] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] neutron.version = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.419986] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] notifications.bdms_in_notifications = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.420200] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] notifications.default_level = INFO {{(pid=61649) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.420387] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] notifications.notification_format = unversioned {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.420556] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] notifications.notify_on_state_change = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.420732] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.420910] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] pci.alias = [] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.421083] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] pci.device_spec = [] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.421249] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] pci.report_in_placement = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.421423] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] placement.auth_section = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.421598] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] placement.auth_type = password {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.421768] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.421931] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] placement.cafile = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.422091] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] placement.certfile = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.422257] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] placement.collect_timing = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.422417] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] placement.connect_retries = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.422576] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] placement.connect_retry_delay = None {{(pid=61649) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.422736] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] placement.default_domain_id = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.422895] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] placement.default_domain_name = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.423055] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] placement.domain_id = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.423217] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] placement.domain_name = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.423378] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] placement.endpoint_override = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.423539] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] placement.insecure = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.423699] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] placement.keyfile = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.423857] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] placement.max_version = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.424025] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] placement.min_version = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.424203] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] placement.password = **** {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.424366] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] placement.project_domain_id = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.424532] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] placement.project_domain_name = Default {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.424698] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] placement.project_id = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.424869] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] placement.project_name = service {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.425039] 
env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] placement.region_name = RegionOne {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.425203] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] placement.retriable_status_codes = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.425365] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] placement.service_name = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.425537] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] placement.service_type = placement {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.425701] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] placement.split_loggers = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.425860] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] placement.status_code_retries = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.426021] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] placement.status_code_retry_delay = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.426185] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] placement.system_scope = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.426344] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] placement.timeout = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.426504] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] placement.trust_id = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.426661] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] placement.user_domain_id = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.426828] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] placement.user_domain_name = Default {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.426987] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] placement.user_id = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.427162] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] placement.username = placement {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.427341] env[61649]: DEBUG oslo_service.service [None 
req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.427501] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] placement.version = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.427678] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] quota.cores = 20 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.427845] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] quota.count_usage_from_placement = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.428032] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.428217] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] quota.injected_file_content_bytes = 10240 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.428390] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] quota.injected_file_path_length = 255 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.428561] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] quota.injected_files = 5 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.428730] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] quota.instances = 10 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.428898] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] quota.key_pairs = 100 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.429088] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] quota.metadata_items = 128 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.429264] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] quota.ram = 51200 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.429432] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] quota.recheck_quota = True {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.429604] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] quota.server_group_members = 10 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.429767] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] quota.server_groups = 10 {{(pid=61649) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.429940] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.430123] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.430302] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] scheduler.image_metadata_prefilter = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.430469] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.430636] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] scheduler.max_attempts = 3 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.430802] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] scheduler.max_placement_results = 1000 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.430966] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.431128] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] scheduler.query_placement_for_image_type_support = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.431295] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.431471] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] scheduler.workers = 2 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.431646] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.431819] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.432074] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.432193] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.432362] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.432528] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.432701] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.432889] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.433058] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] filter_scheduler.host_subset_size = 1 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.433223] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.433385] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.433551] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.433716] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] filter_scheduler.isolated_hosts = [] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.433879] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] filter_scheduler.isolated_images = [] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.434039] env[61649]: DEBUG oslo_service.service [None 
req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.434201] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.434371] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.434536] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] filter_scheduler.pci_in_placement = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.434701] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.434865] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.435030] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.435192] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.435355] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.435519] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.435680] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] filter_scheduler.track_instance_changes = True {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.435864] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.436069] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] metrics.required = True {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.436253] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] metrics.weight_multiplier = 1.0 
{{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.436418] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.436584] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] metrics.weight_setting = [] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.436898] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.437075] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] serial_console.enabled = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.437256] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] serial_console.port_range = 10000:20000 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.437428] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.437597] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.437765] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] serial_console.serialproxy_port = 6083 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.437934] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] service_user.auth_section = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.438108] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] service_user.auth_type = password {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.438270] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] service_user.cafile = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.438427] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] service_user.certfile = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.438590] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] service_user.collect_timing = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.438753] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] service_user.insecure = False {{(pid=61649) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.438913] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] service_user.keyfile = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.439116] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] service_user.send_service_user_token = True {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.439281] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] service_user.split_loggers = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.439443] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] service_user.timeout = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.439615] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] spice.agent_enabled = True {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.439792] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] spice.enabled = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.440128] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.440337] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.440515] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] spice.html5proxy_port = 6082 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.440681] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] spice.image_compression = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.440844] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] spice.jpeg_compression = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.441006] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] spice.playback_compression = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.441181] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] spice.server_listen = 127.0.0.1 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.441353] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=61649) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.441514] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] spice.streaming_mode = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.441674] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] spice.zlib_compression = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.441840] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] upgrade_levels.baseapi = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.442012] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] upgrade_levels.compute = auto {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.442176] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] upgrade_levels.conductor = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.442334] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] upgrade_levels.scheduler = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.442500] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vendordata_dynamic_auth.auth_section = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.442664] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vendordata_dynamic_auth.auth_type = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.442823] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vendordata_dynamic_auth.cafile = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.442981] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vendordata_dynamic_auth.certfile = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.443146] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.443312] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vendordata_dynamic_auth.insecure = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.443462] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vendordata_dynamic_auth.keyfile = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.443624] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=61649) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.443781] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vendordata_dynamic_auth.timeout = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.443953] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vmware.api_retry_count = 10 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.444130] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vmware.ca_file = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.444307] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vmware.cache_prefix = devstack-image-cache {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.444476] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vmware.cluster_name = testcl1 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.444641] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vmware.connection_pool_size = 10 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.444801] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vmware.console_delay_seconds = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.444969] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vmware.datastore_regex = ^datastore.* {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.445174] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.445348] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vmware.host_password = **** {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.445518] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vmware.host_port = 443 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.445687] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vmware.host_username = administrator@vsphere.local {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.445856] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vmware.insecure = True {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.446021] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vmware.integration_bridge = None {{(pid=61649) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.446185] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vmware.maximum_objects = 100 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.446344] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vmware.pbm_default_policy = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.446504] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vmware.pbm_enabled = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.446664] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vmware.pbm_wsdl_location = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.446834] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.446994] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vmware.serial_port_proxy_uri = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.447153] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vmware.serial_port_service_uri = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.447319] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vmware.task_poll_interval = 0.5 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.447491] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vmware.use_linked_clone = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.447662] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vmware.vnc_keymap = en-us {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.447828] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vmware.vnc_port = 5900 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.447998] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vmware.vnc_port_total = 10000 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.448204] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vnc.auth_schemes = ['none'] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.448381] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vnc.enabled = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.448675] env[61649]: 
DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.448860] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.449059] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vnc.novncproxy_port = 6080 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.449249] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vnc.server_listen = 127.0.0.1 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.449426] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.449591] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vnc.vencrypt_ca_certs = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.449751] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vnc.vencrypt_client_cert = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.449911] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vnc.vencrypt_client_key = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.450136] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.450325] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.450494] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.450659] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.450823] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] workarounds.disable_rootwrap = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.450988] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] workarounds.enable_numa_live_migration = False {{(pid=61649) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.451157] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.451320] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.451483] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.451644] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] workarounds.libvirt_disable_apic = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.451805] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.451966] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.452144] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.452320] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.452492] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.452654] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.452815] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.452976] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.453187] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
525.453401] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.453550] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.453720] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] wsgi.client_socket_timeout = 900 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.453886] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] wsgi.default_pool_size = 1000 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.454053] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] wsgi.keep_alive = True {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.454225] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] wsgi.max_header_line = 16384 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.454390] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] wsgi.secure_proxy_ssl_header = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.454554] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] wsgi.ssl_ca_file = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.454715] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] wsgi.ssl_cert_file = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.454877] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] wsgi.ssl_key_file = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.455043] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] wsgi.tcp_keepidle = 600 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.455220] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.455388] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] zvm.ca_file = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.455549] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] zvm.cloud_connector_url = None {{(pid=61649) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.455827] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.456000] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] zvm.reachable_timeout = 300 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.456201] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_policy.enforce_new_defaults = True {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.456394] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_policy.enforce_scope = True {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.456588] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_policy.policy_default_rule = default {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.456771] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.456945] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_policy.policy_file = policy.yaml {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.457117] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.457282] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.457442] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.457600] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.457762] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.457927] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.458103] env[61649]: DEBUG oslo_service.service [None 
req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.458281] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] profiler.connection_string = messaging:// {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.458446] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] profiler.enabled = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.458614] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] profiler.es_doc_type = notification {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.458774] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] profiler.es_scroll_size = 10000 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.458940] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] profiler.es_scroll_time = 2m {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.459131] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] profiler.filter_error_trace = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.459306] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] profiler.hmac_keys = **** {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.459476] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] profiler.sentinel_service_name = mymaster {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.459644] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] profiler.socket_timeout = 0.1 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.459805] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] profiler.trace_requests = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.459975] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] profiler.trace_sqlalchemy = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.460208] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] profiler_jaeger.process_tags = {} {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.460383] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] profiler_jaeger.service_name_prefix = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.460552] env[61649]: DEBUG oslo_service.service [None 
req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] profiler_otlp.service_name_prefix = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.460718] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] remote_debug.host = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.460880] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] remote_debug.port = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.461061] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.461226] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.461388] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.461549] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.461710] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.461870] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.462030] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.462191] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.462352] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.462521] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.462680] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=61649) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.462847] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.463012] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.463184] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.463354] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.463522] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.463680] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.463850] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.464017] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.464183] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.464348] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.464509] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.464668] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.464831] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=61649) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.464993] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.465157] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.465317] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.465476] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.465640] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.465803] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_messaging_rabbit.ssl = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.465974] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.466144] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.466303] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.466469] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.466636] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_messaging_rabbit.ssl_version = {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.466795] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.466981] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.467145] env[61649]: DEBUG oslo_service.service [None 
req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_messaging_notifications.retry = -1 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.467324] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.467494] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_messaging_notifications.transport_url = **** {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.467662] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_limit.auth_section = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.467823] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_limit.auth_type = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.467979] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_limit.cafile = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.468154] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_limit.certfile = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.468317] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_limit.collect_timing = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.468502] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_limit.connect_retries = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.468669] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_limit.connect_retry_delay = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.468829] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_limit.endpoint_id = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.469011] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_limit.endpoint_override = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.469181] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_limit.insecure = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.469341] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_limit.keyfile = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.469499] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None 
None] oslo_limit.max_version = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.469655] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_limit.min_version = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.469810] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_limit.region_name = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.469971] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_limit.retriable_status_codes = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.470143] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_limit.service_name = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.470311] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_limit.service_type = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.470474] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_limit.split_loggers = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.470631] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_limit.status_code_retries = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.470788] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_limit.status_code_retry_delay = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.470946] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_limit.timeout = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.471103] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_limit.valid_interfaces = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.471266] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_limit.version = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.471426] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_reports.file_event_handler = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.471588] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.471746] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] oslo_reports.log_dir = None {{(pid=61649) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.471915] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.472087] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.472250] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.472416] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.472579] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.472736] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vif_plug_linux_bridge_privileged.user = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.472905] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.473064] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vif_plug_ovs_privileged.group = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.473223] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.473386] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.473587] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.473698] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] vif_plug_ovs_privileged.user = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.473865] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] os_vif_linux_bridge.flat_interface = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.474040] 
env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.474212] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.474382] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.474551] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.474714] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.474879] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.475041] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.475218] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.475386] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] os_vif_ovs.isolate_vif = False {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.475575] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.475739] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.475908] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.476088] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] os_vif_ovs.ovsdb_interface = native {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.476255] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] os_vif_ovs.per_port_bridge = False {{(pid=61649) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.476418] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] os_brick.lock_path = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.476587] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] privsep_osbrick.capabilities = [21] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.476745] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] privsep_osbrick.group = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.476901] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] privsep_osbrick.helper_command = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.477070] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.477230] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.477387] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] privsep_osbrick.user = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.477557] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.477715] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] nova_sys_admin.group = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.477868] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] nova_sys_admin.helper_command = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.478029] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.478190] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.478343] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] nova_sys_admin.user = None {{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 525.478473] env[61649]: DEBUG oslo_service.service [None req-7c95e0e3-904e-4dbe-97a1-c5cf66d2e324 None None] ******************************************************************************** 
{{(pid=61649) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2624}} [ 525.478887] env[61649]: INFO nova.service [-] Starting compute node (version 27.1.0) [ 525.479737] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-87fc200c-9d41-4a85-8b4e-e8c72192bded None None] Expecting reply to msg 60c26b668fb84c2f89c741701d896211 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 525.486280] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 60c26b668fb84c2f89c741701d896211 [ 525.487310] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-87fc200c-9d41-4a85-8b4e-e8c72192bded None None] Getting list of instances from cluster (obj){ [ 525.487310] env[61649]: value = "domain-c8" [ 525.487310] env[61649]: _type = "ClusterComputeResource" [ 525.487310] env[61649]: } {{(pid=61649) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 525.488307] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-debefa18-371d-40f3-a7c1-40d27df120b6 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.497105] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-87fc200c-9d41-4a85-8b4e-e8c72192bded None None] Got total of 0 instances {{(pid=61649) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 525.497602] env[61649]: WARNING nova.virt.vmwareapi.driver [None req-87fc200c-9d41-4a85-8b4e-e8c72192bded None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 525.498020] env[61649]: INFO nova.virt.node [None req-87fc200c-9d41-4a85-8b4e-e8c72192bded None None] Generated node identity dad32f24-3843-462d-a3f9-4ef2a60037c4 [ 525.498245] env[61649]: INFO nova.virt.node [None req-87fc200c-9d41-4a85-8b4e-e8c72192bded None None] Wrote node identity dad32f24-3843-462d-a3f9-4ef2a60037c4 to /opt/stack/data/n-cpu-1/compute_id [ 525.498610] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-87fc200c-9d41-4a85-8b4e-e8c72192bded None None] Expecting reply to msg 1ac1b849805541be85fcafa1cf069448 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 525.509625] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1ac1b849805541be85fcafa1cf069448 [ 525.510090] env[61649]: WARNING nova.compute.manager [None req-87fc200c-9d41-4a85-8b4e-e8c72192bded None None] Compute nodes ['dad32f24-3843-462d-a3f9-4ef2a60037c4'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. 
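[Editor's note] The row of asterisks above closes oslo.config's option dump: at service start, every registered option is logged one-per-line at DEBUG, bracketed by banner lines (the cfg.py:2620/2624 call sites in the entries). A minimal sketch of that mechanism; the group and option names here are illustrative stand-ins, not Nova's real option set:

    import logging

    from oslo_config import cfg

    CONF = cfg.CONF
    # Hypothetical options standing in for the oslo_limit.* values logged above.
    CONF.register_opts(
        [cfg.StrOpt('region_name'), cfg.BoolOpt('split_loggers', default=False)],
        group='oslo_limit')

    logging.basicConfig(level=logging.DEBUG)
    LOG = logging.getLogger('oslo_service.service')

    CONF([], project='demo')                   # parse an (empty) command line
    CONF.log_opt_values(LOG, logging.DEBUG)    # one line per option, then the banner
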
[ 525.510663] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-87fc200c-9d41-4a85-8b4e-e8c72192bded None None] Expecting reply to msg 46ebee5e7e744f59b0b550d4fb02a623 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 525.531053] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 46ebee5e7e744f59b0b550d4fb02a623 [ 525.531706] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-87fc200c-9d41-4a85-8b4e-e8c72192bded None None] Expecting reply to msg 01b02ca11d754975afaaa4f1e8311409 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 525.541631] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 01b02ca11d754975afaaa4f1e8311409 [ 525.542167] env[61649]: INFO nova.compute.manager [None req-87fc200c-9d41-4a85-8b4e-e8c72192bded None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 525.542589] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-87fc200c-9d41-4a85-8b4e-e8c72192bded None None] Expecting reply to msg e646fcaf90f34c159657b4946a8a2701 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 525.552928] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e646fcaf90f34c159657b4946a8a2701 [ 525.553639] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-87fc200c-9d41-4a85-8b4e-e8c72192bded None None] Expecting reply to msg be7f09f3f3a14c31962e6c8ac478fc85 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 525.562998] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg be7f09f3f3a14c31962e6c8ac478fc85 [ 525.563466] env[61649]: WARNING nova.compute.manager [None req-87fc200c-9d41-4a85-8b4e-e8c72192bded None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. 
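[Editor's note] Each "Expecting reply to msg ... / Received RPC response" pair above is one synchronous RPC round trip from the compute service, and ComputeHostNotFound_Remote is the remote exception deserialized from such a reply. A hedged sketch of that call pattern with oslo.messaging; the topic, version, method name, and arguments are illustrative, and the transport URL would normally come from nova.conf:

    import oslo_messaging
    from oslo_config import cfg

    transport = oslo_messaging.get_rpc_transport(cfg.CONF)
    target = oslo_messaging.Target(topic='conductor', version='3.0')
    client = oslo_messaging.RPCClient(transport, target)

    def fetch_compute_node(ctxt, host):
        # call() publishes the request, then blocks on the service's reply
        # queue until the matching msg id comes back -- the bookkeeping the
        # amqpdriver lines above make visible.
        return client.call(ctxt, 'compute_node_get_all_by_host', host=host)
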
[ 525.563680] env[61649]: DEBUG oslo_concurrency.lockutils [None req-87fc200c-9d41-4a85-8b4e-e8c72192bded None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 525.563887] env[61649]: DEBUG oslo_concurrency.lockutils [None req-87fc200c-9d41-4a85-8b4e-e8c72192bded None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 525.564044] env[61649]: DEBUG oslo_concurrency.lockutils [None req-87fc200c-9d41-4a85-8b4e-e8c72192bded None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 525.564207] env[61649]: DEBUG nova.compute.resource_tracker [None req-87fc200c-9d41-4a85-8b4e-e8c72192bded None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61649) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 525.565179] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76b8e5c3-b88d-47b2-b0fa-d0426e4be400 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.573068] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb325e20-46f1-4c79-ae71-bd58d341c594 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.586463] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b9cd171-d1b0-42fa-92bc-16f1b32ffce4 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.592264] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb2090ed-6c40-43a3-a6ff-16053365e174 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.621366] env[61649]: DEBUG nova.compute.resource_tracker [None req-87fc200c-9d41-4a85-8b4e-e8c72192bded None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181815MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61649) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 525.621514] env[61649]: DEBUG oslo_concurrency.lockutils [None req-87fc200c-9d41-4a85-8b4e-e8c72192bded None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 525.621699] env[61649]: DEBUG oslo_concurrency.lockutils [None req-87fc200c-9d41-4a85-8b4e-e8c72192bded None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 525.622023] env[61649]: INFO 
oslo_messaging._drivers.amqpdriver [None req-87fc200c-9d41-4a85-8b4e-e8c72192bded None None] Expecting reply to msg b891888ac22041c69777be1e36502d98 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 525.633248] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b891888ac22041c69777be1e36502d98 [ 525.633745] env[61649]: WARNING nova.compute.resource_tracker [None req-87fc200c-9d41-4a85-8b4e-e8c72192bded None None] No compute node record for cpu-1:dad32f24-3843-462d-a3f9-4ef2a60037c4: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host dad32f24-3843-462d-a3f9-4ef2a60037c4 could not be found. [ 525.634730] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-87fc200c-9d41-4a85-8b4e-e8c72192bded None None] Expecting reply to msg 3ad855476a164b4bbf5c48c58848878d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 525.645570] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3ad855476a164b4bbf5c48c58848878d [ 525.646206] env[61649]: INFO nova.compute.resource_tracker [None req-87fc200c-9d41-4a85-8b4e-e8c72192bded None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: dad32f24-3843-462d-a3f9-4ef2a60037c4 [ 525.646574] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-87fc200c-9d41-4a85-8b4e-e8c72192bded None None] Expecting reply to msg 1231d6a6a116428088022b1adb6b7f20 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 525.655894] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1231d6a6a116428088022b1adb6b7f20 [ 525.656717] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-87fc200c-9d41-4a85-8b4e-e8c72192bded None None] Expecting reply to msg d0704837d6554de4beed5325ec3be5ba in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 525.673476] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d0704837d6554de4beed5325ec3be5ba [ 525.673978] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-87fc200c-9d41-4a85-8b4e-e8c72192bded None None] Expecting reply to msg 6c9fa746647c46eda05e6d91824e9ab7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 525.696343] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6c9fa746647c46eda05e6d91824e9ab7 [ 525.696949] env[61649]: DEBUG nova.compute.resource_tracker [None req-87fc200c-9d41-4a85-8b4e-e8c72192bded None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 525.697102] env[61649]: DEBUG nova.compute.resource_tracker [None req-87fc200c-9d41-4a85-8b4e-e8c72192bded None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 525.790714] env[61649]: INFO nova.scheduler.client.report [None req-87fc200c-9d41-4a85-8b4e-e8c72192bded None None] [req-c28d4dce-2518-4b04-9abe-3467365b71d9] Created resource provider record via placement API for resource provider with UUID dad32f24-3843-462d-a3f9-4ef2a60037c4 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
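[Editor's note] With the provider record created, the inventory pushed to Placement (reported just below) determines schedulable capacity as (total - reserved) * allocation_ratio per resource class. A small re-derivation from the logged numbers; the dict shape mirrors Placement's standard inventory fields:

    # Values copied from the inventory below; only the fields needed for the
    # capacity formula are kept.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def capacity(inv: dict) -> float:
        # Placement admits allocations up to (total - reserved) * allocation_ratio.
        return (inv['total'] - inv['reserved']) * inv['allocation_ratio']

    for rc, inv in inventory.items():
        print(rc, capacity(inv))   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
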
[ 525.806689] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-387b3bfa-306e-43f5-93d1-36aea4c5a879 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.813782] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30987988-8be3-4d90-9bab-19103e69e4e6 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.842066] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bfd27b1-a20a-47d9-b3ea-396a5ab0e259 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.848662] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7058b2d7-4027-4aac-9502-8fce7309d4fe {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.862338] env[61649]: DEBUG nova.compute.provider_tree [None req-87fc200c-9d41-4a85-8b4e-e8c72192bded None None] Updating inventory in ProviderTree for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 525.862861] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-87fc200c-9d41-4a85-8b4e-e8c72192bded None None] Expecting reply to msg 1f7c555013b6437b88a29bf4a19fdbe3 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 525.869742] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1f7c555013b6437b88a29bf4a19fdbe3 [ 525.898573] env[61649]: DEBUG nova.scheduler.client.report [None req-87fc200c-9d41-4a85-8b4e-e8c72192bded None None] Updated inventory for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:957}} [ 525.898811] env[61649]: DEBUG nova.compute.provider_tree [None req-87fc200c-9d41-4a85-8b4e-e8c72192bded None None] Updating resource provider dad32f24-3843-462d-a3f9-4ef2a60037c4 generation from 0 to 1 during operation: update_inventory {{(pid=61649) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 525.898953] env[61649]: DEBUG nova.compute.provider_tree [None req-87fc200c-9d41-4a85-8b4e-e8c72192bded None None] Updating inventory in ProviderTree for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 
1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 525.943317] env[61649]: DEBUG nova.compute.provider_tree [None req-87fc200c-9d41-4a85-8b4e-e8c72192bded None None] Updating resource provider dad32f24-3843-462d-a3f9-4ef2a60037c4 generation from 1 to 2 during operation: update_traits {{(pid=61649) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 525.945474] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-87fc200c-9d41-4a85-8b4e-e8c72192bded None None] Expecting reply to msg 50ee08b808cc49798959f8fa5548c4e5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 525.959903] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 50ee08b808cc49798959f8fa5548c4e5 [ 525.960638] env[61649]: DEBUG nova.compute.resource_tracker [None req-87fc200c-9d41-4a85-8b4e-e8c72192bded None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61649) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 525.960814] env[61649]: DEBUG oslo_concurrency.lockutils [None req-87fc200c-9d41-4a85-8b4e-e8c72192bded None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.339s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 525.960974] env[61649]: DEBUG nova.service [None req-87fc200c-9d41-4a85-8b4e-e8c72192bded None None] Creating RPC server for service compute {{(pid=61649) start /opt/stack/nova/nova/service.py:182}} [ 525.970787] env[61649]: INFO oslo.messaging._drivers.impl_rabbit [None req-87fc200c-9d41-4a85-8b4e-e8c72192bded None None] Creating fanout queue: compute_fanout_b0926295821c4b6fa3ae70cbba06d6eb [ 525.973992] env[61649]: DEBUG nova.service [None req-87fc200c-9d41-4a85-8b4e-e8c72192bded None None] Join ServiceGroup membership for this service compute {{(pid=61649) start /opt/stack/nova/nova/service.py:199}} [ 525.974177] env[61649]: DEBUG nova.servicegroup.drivers.db [None req-87fc200c-9d41-4a85-8b4e-e8c72192bded None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=61649) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 527.976695] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._sync_power_states {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 527.977466] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 3c5a3c6b8b784d339fed3d93d49016b5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 527.987072] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3c5a3c6b8b784d339fed3d93d49016b5 [ 527.987646] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Getting list of instances from cluster (obj){ [ 527.987646] env[61649]: value = "domain-c8" [ 527.987646] env[61649]: _type = "ClusterComputeResource" [ 527.987646] env[61649]: } {{(pid=61649) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 527.988699] env[61649]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5298d8eb-7275-40eb-ae50-277ca68b8757 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.997559] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Got total of 0 instances {{(pid=61649) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 527.997784] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 527.998079] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Getting list of instances from cluster (obj){ [ 527.998079] env[61649]: value = "domain-c8" [ 527.998079] env[61649]: _type = "ClusterComputeResource" [ 527.998079] env[61649]: } {{(pid=61649) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 527.998928] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba970c29-3ca9-4ae3-a86e-670317a1ea1f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.006393] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Got total of 0 instances {{(pid=61649) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 530.977429] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 35e4c8d145b24dd680eff96bddbdf551 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 530.987769] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 35e4c8d145b24dd680eff96bddbdf551 [ 535.317741] env[61649]: DEBUG dbcounter [-] [61649] Writing DB stats nova_cell0:SELECT=1 {{(pid=61649) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}} [ 535.319339] env[61649]: DEBUG dbcounter [-] [61649] Writing DB stats nova_cell1:SELECT=1 {{(pid=61649) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}} [ 557.943938] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Acquiring lock "683996e1-4e16-4add-8fa6-3c2843ebbf21" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 557.944289] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Lock "683996e1-4e16-4add-8fa6-3c2843ebbf21" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 557.944765] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Expecting reply to msg d26c42a641af4742867583ba1a223263 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 557.958589] 
env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d26c42a641af4742867583ba1a223263 [ 557.959196] env[61649]: DEBUG nova.compute.manager [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 557.960948] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Expecting reply to msg 2fee27f2e60c4c528e9d285b0dd4f558 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 558.026304] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2fee27f2e60c4c528e9d285b0dd4f558 [ 558.049648] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 558.049883] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 558.051392] env[61649]: INFO nova.compute.claims [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 558.053007] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Expecting reply to msg 82048df5093343c68e07171ee4aa1c9b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 558.112572] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 82048df5093343c68e07171ee4aa1c9b [ 558.114288] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Expecting reply to msg e25c7aa4343445fa82bb70fe4965cd6a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 558.124418] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e25c7aa4343445fa82bb70fe4965cd6a [ 558.153756] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f58d4965-e569-48b5-ae1f-65393db4d32b {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.162078] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffd83654-4f8f-45b3-8dea-81447d9e8af3 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.193080] env[61649]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c2c14d2-a977-484f-86b1-89a9f5518d7d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.201343] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4712b230-88d4-41e4-b098-552cf922c9da {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.216284] env[61649]: DEBUG nova.compute.provider_tree [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 558.217054] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Expecting reply to msg e51a4bfeac35413291bf5ee8ce48e058 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 558.228433] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e51a4bfeac35413291bf5ee8ce48e058 [ 558.229705] env[61649]: DEBUG nova.scheduler.client.report [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 558.232096] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Expecting reply to msg 3ace3c9e260c4e56bc20d6fe8139eabc in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 558.255873] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3ace3c9e260c4e56bc20d6fe8139eabc [ 558.256811] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.207s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 558.257615] env[61649]: DEBUG nova.compute.manager [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] Start building networks asynchronously for instance. 
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 558.259476] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Expecting reply to msg f584b659a2a24e92a7cf9fdc0b31ae76 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 558.292413] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f584b659a2a24e92a7cf9fdc0b31ae76 [ 558.294255] env[61649]: DEBUG nova.compute.utils [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Using /dev/sd instead of None {{(pid=61649) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 558.295041] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Expecting reply to msg 696cb4addf214f08922e2db936a5582b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 558.296077] env[61649]: DEBUG nova.compute.manager [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] Not allocating networking since 'none' was specified. {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1968}} [ 558.306373] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 696cb4addf214f08922e2db936a5582b [ 558.307035] env[61649]: DEBUG nova.compute.manager [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] Start building block device mappings for instance. {{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 558.309537] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Expecting reply to msg 415b0bf284b14e0b81b5bb58b145d3bf in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 558.340765] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 415b0bf284b14e0b81b5bb58b145d3bf [ 558.343589] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Expecting reply to msg 8c7d0f0484ea4381ab77d54eb89fb1a7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 558.372523] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8c7d0f0484ea4381ab77d54eb89fb1a7 [ 558.373736] env[61649]: DEBUG nova.compute.manager [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] Start spawning the instance on the hypervisor. 
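[Editor's note] The instance claim above was taken under the "compute_resources" lock and held for 0.207s; if the build fails later, the claim is dropped so the resources become schedulable again. A generic sketch of that claim/abort discipline (plain Python, not Nova's actual ResourceTracker code):

    from contextlib import contextmanager
    import threading

    _lock = threading.Lock()
    _used_vcpus = 0

    @contextmanager
    def instance_claim(vcpus: int, total: int = 48):
        # Take the lock, test-and-reserve, then roll back automatically if
        # the build raises -- mirroring the claim lifecycle in the log.
        global _used_vcpus
        with _lock:
            if _used_vcpus + vcpus > total:
                raise RuntimeError('insufficient vCPUs')
            _used_vcpus += vcpus
        try:
            yield
        except Exception:
            with _lock:
                _used_vcpus -= vcpus   # abort the claim on failure
            raise

Usage would be `with instance_claim(1): spawn(...)`: on success the usage stays recorded; on failure it is rolled back.
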
{{(pid=61649) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 560.316713] env[61649]: DEBUG nova.virt.hardware [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-04-02T10:03:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-04-02T10:03:42Z,direct_url=,disk_format='vmdk',id=d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d9c1bd4c77004c3cb8e42232cad1896c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-04-02T10:03:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 560.317033] env[61649]: DEBUG nova.virt.hardware [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Flavor limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 560.317123] env[61649]: DEBUG nova.virt.hardware [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Image limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 560.317319] env[61649]: DEBUG nova.virt.hardware [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Flavor pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 560.317446] env[61649]: DEBUG nova.virt.hardware [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Image pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 560.317591] env[61649]: DEBUG nova.virt.hardware [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 560.317798] env[61649]: DEBUG nova.virt.hardware [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 560.317952] env[61649]: DEBUG nova.virt.hardware [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 560.318356] env[61649]: DEBUG nova.virt.hardware [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 
tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Got 1 possible topologies {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 560.318496] env[61649]: DEBUG nova.virt.hardware [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 560.318660] env[61649]: DEBUG nova.virt.hardware [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 560.319564] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b83c978-1314-4cff-b52c-c7a22a21bf50 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.327457] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-945d9c92-bf24-45e2-b17b-cfb63c738ce8 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.347261] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fab82ed-77df-4260-a7b1-fc46bc75ad64 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.372192] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] Instance VIF info [] {{(pid=61649) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 560.384334] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 560.384697] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a1ed1021-7dfa-4a58-9374-6abd80594531 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.397819] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Created folder: OpenStack in parent group-v4. [ 560.399041] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Creating folder: Project (90d6714fb9c944a38be6c59ab28bf5a3). Parent ref: group-v51588. 
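[Editor's note] The nova.virt.hardware lines above enumerate CPU topologies whose sockets x cores x threads product equals the flavor's vCPU count, bounded by the 65536-per-dimension limits, then sort them by preference; with vcpus=1 only 1:1:1 survives. A compact standalone re-derivation (not Nova's implementation):

    from itertools import product

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        # Keep every (sockets, cores, threads) triple that exactly covers the
        # requested vCPUs and respects the per-dimension limits.
        limit = lambda m: range(1, min(m, vcpus) + 1)
        return [(s, c, t)
                for s, c, t in product(limit(max_sockets), limit(max_cores), limit(max_threads))
                if s * c * t == vcpus]

    print(possible_topologies(1))   # [(1, 1, 1)] -- matches "Got 1 possible topologies"
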
{{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 560.399310] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8b6e3ad5-6f05-4f68-98f3-c229f18e7cd6 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.409370] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Created folder: Project (90d6714fb9c944a38be6c59ab28bf5a3) in parent group-v51588. [ 560.409370] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Creating folder: Instances. Parent ref: group-v51589. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 560.409370] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-14b9bc5f-1aa8-497f-9101-0295ad915cd1 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.431536] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Created folder: Instances in parent group-v51589. [ 560.431912] env[61649]: DEBUG oslo.service.loopingcall [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 560.432092] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] Creating VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 560.432309] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-869caed1-df77-461f-904d-8cfab0edc11f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.456804] env[61649]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 560.456804] env[61649]: value = "task-158085" [ 560.456804] env[61649]: _type = "Task" [ 560.456804] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 560.471036] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158085, 'name': CreateVM_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 560.966185] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158085, 'name': CreateVM_Task, 'duration_secs': 0.296956} completed successfully. 
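[Editor's note] CreateVM_Task is asynchronous on the vCenter side; the client polls task state until success or error, which is why the log shows "progress is 0%" followed by "completed successfully". The oslo.service loopingcall idiom behind such waits can be sketched as follows; the poll function and task states are illustrative:

    from oslo_service import loopingcall

    def wait_for_task(poll):
        # poll() should return the task state each time it is called.
        def _check():
            state = poll()
            if state == 'success':
                raise loopingcall.LoopingCallDone(retvalue=state)
            if state == 'error':
                raise RuntimeError('task failed')
        timer = loopingcall.FixedIntervalLoopingCall(_check)
        return timer.start(interval=0.5).wait()   # blocks until LoopingCallDone

    states = iter(['running', 'running', 'success'])
    print(wait_for_task(lambda: next(states)))    # 'success'
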
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 560.966343] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] Created VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 560.967319] env[61649]: DEBUG oslo_vmware.service [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beb04b91-0f65-4b8d-8e15-545de46f644e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.972937] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 560.973097] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 560.973712] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 560.973940] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bfbf336d-ee89-4e14-8d74-26c18c746ca2 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.978520] env[61649]: DEBUG oslo_vmware.api [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Waiting for the task: (returnval){ [ 560.978520] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52b7bce2-806a-678a-a966-b6b9a1599fb8" [ 560.978520] env[61649]: _type = "Task" [ 560.978520] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 560.986949] env[61649]: DEBUG oslo_vmware.api [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52b7bce2-806a-678a-a966-b6b9a1599fb8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 561.492511] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 561.492852] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] Processing image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 561.492975] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 561.493117] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 561.493521] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 561.493766] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4fbd1cfc-9265-44fa-b56d-ccffe0f63290 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.510360] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 561.510561] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Folder [datastore1] devstack-image-cache_base created. 
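[Editor's note] The image-cache work above is serialized with named locks (the Acquiring/Acquired/Releasing trio on "[datastore1] devstack-image-cache_base/..."), so concurrent builds cannot fetch the same image twice. The oslo.concurrency primitive that emits those lines, in a minimal sketch with the lock name taken from the log and an illustrative body:

    from oslo_concurrency import lockutils

    IMAGE_LOCK = ('[datastore1] devstack-image-cache_base/'
                  'd1cd53dd-702d-47cc-aaaf-dcf09a8c9d11')

    def fetch_image_if_missing():
        # lockutils.lock() logs the same Acquiring/Acquired/Releasing
        # messages seen above while the context is held.
        with lockutils.lock(IMAGE_LOCK):
            pass  # check the cache directory; download only if absent
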
{{(pid=61649) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 561.511995] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f031a117-f39c-4d6f-9fb7-6fa88b3d1b57 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.519473] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e095976-faad-4f64-9ff0-d259090232d0 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.529679] env[61649]: DEBUG oslo_vmware.api [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Waiting for the task: (returnval){ [ 561.529679] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52ab18a0-57bf-adff-10f0-39c980851afd" [ 561.529679] env[61649]: _type = "Task" [ 561.529679] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 561.547652] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] Preparing fetch location {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 561.547652] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Creating directory with path [datastore1] vmware_temp/e498a504-68df-4b14-aba3-6b12d086305b/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 561.547652] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ae0fa1f4-d0af-48e4-95a5-8e75d7723115 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.563769] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Created directory with path [datastore1] vmware_temp/e498a504-68df-4b14-aba3-6b12d086305b/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 561.564088] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] Fetch image to [datastore1] vmware_temp/e498a504-68df-4b14-aba3-6b12d086305b/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 561.564331] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to [datastore1] vmware_temp/e498a504-68df-4b14-aba3-6b12d086305b/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) _fetch_image_as_file 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 561.565448] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-797e22d2-cf74-426a-815b-d42ef25486c2 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.574207] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61b05620-077b-478f-941c-3cffcd1b5354 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.583455] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdcba334-32c1-449c-a6f7-a29a30d25c1e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.622720] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14a92932-3f0f-4291-9ecf-8b3bf430f89e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.635087] env[61649]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-4aeb1758-3a76-4bef-9cb3-b9f170a944f7 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.674823] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 561.752432] env[61649]: DEBUG oslo_vmware.rw_handles [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e498a504-68df-4b14-aba3-6b12d086305b/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 561.827764] env[61649]: DEBUG oslo_vmware.rw_handles [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Completed reading data from the image iterator. {{(pid=61649) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 561.827924] env[61649]: DEBUG oslo_vmware.rw_handles [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e498a504-68df-4b14-aba3-6b12d086305b/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
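The rw_handles records above amount to streaming the 21318656-byte Glance image into an authenticated HTTP PUT against the datastore's /folder URL. A rough equivalent with requests, assuming a local source file and a hypothetical session cookie (the real handle authenticates with a ticket from the AcquireGenericServiceTicket call seen earlier):

    import requests

    url = ('https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/'
           'vmware_temp/.../tmp-sparse.vmdk')            # path abbreviated
    params = {'dcPath': 'ha-datacenter', 'dsName': 'datastore1'}
    session_cookies = {'vmware_soap_session': 'placeholder'}  # hypothetical

    # Passing an open file object lets requests stream the body and set
    # Content-Length from the file size (21318656 bytes in the run above).
    with open('tmp-sparse.vmdk', 'rb') as src:
        resp = requests.put(url, params=params, data=src,
                            cookies=session_cookies, verify=False)
    resp.raise_for_status()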
{{(pid=61649) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 562.144103] env[61649]: DEBUG oslo_concurrency.lockutils [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Acquiring lock "b6e68fe1-4ec8-4f0f-bc6b-168038b1998e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 562.144321] env[61649]: DEBUG oslo_concurrency.lockutils [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Lock "b6e68fe1-4ec8-4f0f-bc6b-168038b1998e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 562.144771] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Expecting reply to msg ecc2966523f14afdb583dad19c0656ac in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 562.162578] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ecc2966523f14afdb583dad19c0656ac [ 562.163415] env[61649]: DEBUG nova.compute.manager [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] Starting instance... 
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 562.164787] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Expecting reply to msg b60794c0e71b41b6927bec641c20615c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 562.229975] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b60794c0e71b41b6927bec641c20615c [ 562.272563] env[61649]: DEBUG oslo_concurrency.lockutils [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 562.272810] env[61649]: DEBUG oslo_concurrency.lockutils [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 562.274293] env[61649]: INFO nova.compute.claims [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 562.276208] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Expecting reply to msg 583e7d3cdfa04afe9317c4d18eb3603c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 562.331481] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 583e7d3cdfa04afe9317c4d18eb3603c [ 562.333285] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Expecting reply to msg 01b89c720dff412da77de9b3e044a2c2 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 562.352225] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 01b89c720dff412da77de9b3e044a2c2 [ 562.408449] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d29bca7-2297-4a68-92c6-ac047f0c93d6 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.415871] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0add4498-910f-42f8-8cdd-05cd7681add5 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.458406] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eae0fdf-1ed1-4f1d-9a0b-b59011e2a38a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.466759] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-fae74f58-c634-4e21-9eea-24bb92608b1b {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.481633] env[61649]: DEBUG nova.compute.provider_tree [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 562.482143] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Expecting reply to msg 7783139285c04601bb6c289887280deb in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 562.492553] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7783139285c04601bb6c289887280deb [ 562.492553] env[61649]: DEBUG nova.scheduler.client.report [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 562.493500] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Expecting reply to msg cd9390a70cb24226bcf3bcf8ea6c787d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 562.508554] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cd9390a70cb24226bcf3bcf8ea6c787d [ 562.509528] env[61649]: DEBUG oslo_concurrency.lockutils [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.237s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 562.509897] env[61649]: DEBUG nova.compute.manager [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] Start building networks asynchronously for instance. 
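The inventory reported above fixes what the scheduler can place on this node. A quick worked check using the Placement capacity rule, capacity = (total - reserved) * allocation_ratio, with max_unit capping any single allocation:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 197},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, int(capacity), inv['max_unit'])
    # VCPU 192 16 / MEMORY_MB 196078 65530 / DISK_GB 400 197:
    # 192 schedulable vCPUs, ~191 GiB of RAM and 400 GB of disk, so the
    # m1.nano claim above (1 vCPU, 128 MB, 1 GB root disk) succeeds trivially.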
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 562.511536] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Expecting reply to msg 86a7ba8429774446a87e2110e01a6273 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 562.551245] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 86a7ba8429774446a87e2110e01a6273 [ 562.553107] env[61649]: DEBUG nova.compute.utils [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Using /dev/sd instead of None {{(pid=61649) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 562.553690] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Expecting reply to msg 7a0eadba9a8b49daa1429f0f62de254d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 562.554531] env[61649]: DEBUG nova.compute.manager [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] Allocating IP information in the background. {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 562.554800] env[61649]: DEBUG nova.network.neutron [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] allocate_for_instance() {{(pid=61649) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 562.573792] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7a0eadba9a8b49daa1429f0f62de254d [ 562.575323] env[61649]: DEBUG nova.compute.manager [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] Start building block device mappings for instance. 
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 562.577533] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Expecting reply to msg 8b873d280c4f42a483c79c90c9e1312f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 562.615713] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8b873d280c4f42a483c79c90c9e1312f [ 562.618165] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Expecting reply to msg 2e2157e47e60409ca9146073fd5765d2 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 562.662765] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2e2157e47e60409ca9146073fd5765d2 [ 562.663081] env[61649]: DEBUG nova.compute.manager [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] Start spawning the instance on the hypervisor. {{(pid=61649) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 562.686373] env[61649]: DEBUG nova.virt.hardware [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-04-02T10:03:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-04-02T10:03:42Z,direct_url=,disk_format='vmdk',id=d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d9c1bd4c77004c3cb8e42232cad1896c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-04-02T10:03:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 562.686605] env[61649]: DEBUG nova.virt.hardware [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Flavor limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 562.686757] env[61649]: DEBUG nova.virt.hardware [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Image limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 562.687258] env[61649]: DEBUG nova.virt.hardware [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Flavor pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 562.687258] env[61649]: DEBUG nova.virt.hardware [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 
tempest-DeleteServersAdminTestJSON-500581992-project-member] Image pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 562.687258] env[61649]: DEBUG nova.virt.hardware [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 562.687416] env[61649]: DEBUG nova.virt.hardware [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 562.687562] env[61649]: DEBUG nova.virt.hardware [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 562.687719] env[61649]: DEBUG nova.virt.hardware [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Got 1 possible topologies {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 562.687871] env[61649]: DEBUG nova.virt.hardware [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 562.688049] env[61649]: DEBUG nova.virt.hardware [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 562.688947] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-767e3841-910e-42a0-b403-ca66a539c37d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.697066] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d7dcc9f-8bef-4d5f-8220-5248ab29291d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.873354] env[61649]: DEBUG nova.policy [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5e6b5c355ece4a0e97df6e999af64b58', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cf89869331f1470a8cf537607173c67f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 
'service_roles': []} {{(pid=61649) authorize /opt/stack/nova/nova/policy.py:203}} [ 564.798327] env[61649]: DEBUG nova.network.neutron [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] Successfully created port: 2a56cfa7-02aa-4ce0-ba1d-bc427919fbc2 {{(pid=61649) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 568.945225] env[61649]: DEBUG nova.network.neutron [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] Successfully updated port: 2a56cfa7-02aa-4ce0-ba1d-bc427919fbc2 {{(pid=61649) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 568.945870] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Expecting reply to msg b2de32ed9ea243dca1328b0212e5c2e3 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 568.957504] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b2de32ed9ea243dca1328b0212e5c2e3 [ 568.958251] env[61649]: DEBUG oslo_concurrency.lockutils [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Acquiring lock "refresh_cache-b6e68fe1-4ec8-4f0f-bc6b-168038b1998e" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 568.958341] env[61649]: DEBUG oslo_concurrency.lockutils [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Acquired lock "refresh_cache-b6e68fe1-4ec8-4f0f-bc6b-168038b1998e" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 568.958502] env[61649]: DEBUG nova.network.neutron [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] Building network info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 568.958891] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Expecting reply to msg 043c30aa6e514ac1bfe9892b6f33e833 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 568.969000] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 043c30aa6e514ac1bfe9892b6f33e833 [ 569.149591] env[61649]: DEBUG nova.network.neutron [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] Instance cache missing network info. 
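The refresh_cache lock lines above follow the usual oslo.concurrency pattern: a named in-process lock serializes network-cache rebuilds per instance. A minimal sketch, with rebuild_nw_cache standing in for the guarded work:

    from oslo_concurrency import lockutils

    def rebuild_nw_cache(instance_uuid):
        ...  # hypothetical stand-in for the cache rebuild

    instance_uuid = 'b6e68fe1-4ec8-4f0f-bc6b-168038b1998e'
    # lockutils.lock() is the helper emitting the Acquiring/Acquired/
    # Releasing lines; the default is an in-process semaphore.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        rebuild_nw_cache(instance_uuid)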
{{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 569.529048] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Acquiring lock "29281253-e489-48f5-b219-75ae984adb00" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 569.529287] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Lock "29281253-e489-48f5-b219-75ae984adb00" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 569.529746] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Expecting reply to msg 163def922dab4d49bc4b772d2d965cfa in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 569.547465] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 163def922dab4d49bc4b772d2d965cfa [ 569.547990] env[61649]: DEBUG nova.compute.manager [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] [instance: 29281253-e489-48f5-b219-75ae984adb00] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 569.549729] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Expecting reply to msg 5c361660ac2d4305aa5ba2a8b1fba5dc in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 569.581593] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5c361660ac2d4305aa5ba2a8b1fba5dc [ 569.608713] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 569.608713] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 569.610385] env[61649]: INFO nova.compute.claims [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] [instance: 29281253-e489-48f5-b219-75ae984adb00] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 569.611945] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 
tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Expecting reply to msg ea32d1f26361449392b1b6507d97aea9 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 569.659694] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ea32d1f26361449392b1b6507d97aea9 [ 569.661509] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Expecting reply to msg bba78562636d4cce9f3634da1197f5db in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 569.673459] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bba78562636d4cce9f3634da1197f5db [ 569.744459] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bac1eee5-e560-493b-aa74-5ac29804a01e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.752607] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ac5ba56-ad48-4334-b8cd-1697ec3257dc {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.796487] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7875e171-248d-48f4-9f12-7e7fb160629c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.804852] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c15996aa-6c17-49a0-b28e-29032da15500 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.821939] env[61649]: DEBUG nova.compute.provider_tree [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 569.822573] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Expecting reply to msg 61474f1f54b34734bc9c40ed277859ea in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 569.824814] env[61649]: DEBUG nova.network.neutron [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] Updating instance_info_cache with network_info: [{"id": "2a56cfa7-02aa-4ce0-ba1d-bc427919fbc2", "address": "fa:16:3e:75:88:be", "network": {"id": "b96f19dc-5248-4d43-8f39-ea3131aaf6db", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.117", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d9c1bd4c77004c3cb8e42232cad1896c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": 
{"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a8c8175-1197-4f12-baac-ef6aba95f585", "external-id": "nsx-vlan-transportzone-832", "segmentation_id": 832, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a56cfa7-02", "ovs_interfaceid": "2a56cfa7-02aa-4ce0-ba1d-bc427919fbc2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 569.825252] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Expecting reply to msg 6083a7e7b2904c9f874ab1e2a0e446a9 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 569.829847] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 61474f1f54b34734bc9c40ed277859ea [ 569.830850] env[61649]: DEBUG nova.scheduler.client.report [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 569.833247] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Expecting reply to msg a0ae9f8855ba49fe9a2f82b8906d74bf in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 569.843660] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a0ae9f8855ba49fe9a2f82b8906d74bf [ 569.844550] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.236s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 569.845092] env[61649]: DEBUG nova.compute.manager [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] [instance: 29281253-e489-48f5-b219-75ae984adb00] Start building networks asynchronously for instance. 
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 569.846808] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Expecting reply to msg c454bc9350da47d5ba547eafe8a315c6 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 569.848589] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6083a7e7b2904c9f874ab1e2a0e446a9 [ 569.849044] env[61649]: DEBUG oslo_concurrency.lockutils [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Releasing lock "refresh_cache-b6e68fe1-4ec8-4f0f-bc6b-168038b1998e" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 569.849286] env[61649]: DEBUG nova.compute.manager [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] Instance network_info: |[{"id": "2a56cfa7-02aa-4ce0-ba1d-bc427919fbc2", "address": "fa:16:3e:75:88:be", "network": {"id": "b96f19dc-5248-4d43-8f39-ea3131aaf6db", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.117", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d9c1bd4c77004c3cb8e42232cad1896c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a8c8175-1197-4f12-baac-ef6aba95f585", "external-id": "nsx-vlan-transportzone-832", "segmentation_id": 832, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a56cfa7-02", "ovs_interfaceid": "2a56cfa7-02aa-4ce0-ba1d-bc427919fbc2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 569.849899] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:75:88:be', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1a8c8175-1197-4f12-baac-ef6aba95f585', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2a56cfa7-02aa-4ce0-ba1d-bc427919fbc2', 'vif_model': 'vmxnet3'}] {{(pid=61649) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 569.858435] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Creating folder: Project (cf89869331f1470a8cf537607173c67f). Parent ref: group-v51588. 
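The network_info structure logged above is what the driver turns into VIF info further down (MAC address, NSX logical switch, vmxnet3 model). Extracting the commonly needed fields; the dict copies values from the log, abbreviated:

    network_info = [{
        'id': '2a56cfa7-02aa-4ce0-ba1d-bc427919fbc2',
        'address': 'fa:16:3e:75:88:be',
        'type': 'ovs',
        'devname': 'tap2a56cfa7-02',
        'network': {
            'bridge': 'br-int',
            'subnets': [{
                'cidr': '192.168.233.0/24',
                'gateway': {'address': '192.168.233.1'},
                'ips': [{'address': '192.168.233.117', 'type': 'fixed'}],
            }],
        },
    }]
    for vif in network_info:
        fixed = [ip['address']
                 for subnet in vif['network']['subnets']
                 for ip in subnet['ips'] if ip['type'] == 'fixed']
        print(vif['id'], vif['address'], fixed)
    # 2a56cfa7-... fa:16:3e:75:88:be ['192.168.233.117']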
{{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 569.859156] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-218d0097-e9da-45c1-84ca-7f3fa4b09fc9 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.878293] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Created folder: Project (cf89869331f1470a8cf537607173c67f) in parent group-v51588. [ 569.878596] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Creating folder: Instances. Parent ref: group-v51592. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 569.879264] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1f023a1c-95a9-4635-ad0d-b99264f524c4 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.893342] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Created folder: Instances in parent group-v51592. [ 569.894883] env[61649]: DEBUG oslo.service.loopingcall [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 569.895124] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] Creating VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 569.895336] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6f2c02e9-faac-4929-bc64-35bc225de3b2 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.918274] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c454bc9350da47d5ba547eafe8a315c6 [ 569.919703] env[61649]: DEBUG nova.compute.utils [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Using /dev/sd instead of None {{(pid=61649) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 569.929818] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Expecting reply to msg b481b69b066b4fd489c54fd386f4b6c8 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 569.936026] env[61649]: DEBUG nova.compute.manager [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] [instance: 29281253-e489-48f5-b219-75ae984adb00] Allocating IP information in the background. 
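Both folder creations here (the project folder, then Instances under group-v51592) are synchronous VIM calls rather than tasks. A sketch via invoke_api, with session as in the earlier sketch and parent_ref a placeholder for the parent Folder moref:

    parent_ref = ...  # e.g. the moref logged as group-v51592
    # Folder.CreateFolder returns the new Folder moref immediately;
    # no polling is needed, unlike the CreateVM_Task that follows.
    instances_folder = session.invoke_api(
        session.vim, 'CreateFolder', parent_ref, name='Instances')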
{{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 569.936026] env[61649]: DEBUG nova.network.neutron [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] [instance: 29281253-e489-48f5-b219-75ae984adb00] allocate_for_instance() {{(pid=61649) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 569.938492] env[61649]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 569.938492] env[61649]: value = "task-158088" [ 569.938492] env[61649]: _type = "Task" [ 569.938492] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 569.947083] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158088, 'name': CreateVM_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 569.947600] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b481b69b066b4fd489c54fd386f4b6c8 [ 569.948243] env[61649]: DEBUG nova.compute.manager [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] [instance: 29281253-e489-48f5-b219-75ae984adb00] Start building block device mappings for instance. {{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 569.949883] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Expecting reply to msg 3fdda40f5b5840f28403caf72a7b10f7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 570.005101] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3fdda40f5b5840f28403caf72a7b10f7 [ 570.007793] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Expecting reply to msg 82675589495146ee9de0b195a6b36dc8 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 570.031542] env[61649]: DEBUG oslo_concurrency.lockutils [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Acquiring lock "2c9b8ba9-193e-468f-bc4e-006ab413b374" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 570.031776] env[61649]: DEBUG oslo_concurrency.lockutils [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Lock "2c9b8ba9-193e-468f-bc4e-006ab413b374" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 570.032254] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Expecting reply to msg 7efe1b1a109444a4989f95792c326f66 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 570.040648] env[61649]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7efe1b1a109444a4989f95792c326f66 [ 570.041111] env[61649]: DEBUG nova.compute.manager [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 570.043001] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Expecting reply to msg 4de844a6ab0349acb768ade23709784d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 570.054168] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 82675589495146ee9de0b195a6b36dc8 [ 570.054168] env[61649]: DEBUG nova.compute.manager [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] [instance: 29281253-e489-48f5-b219-75ae984adb00] Start spawning the instance on the hypervisor. {{(pid=61649) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 570.078655] env[61649]: DEBUG nova.virt.hardware [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-04-02T10:03:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-04-02T10:03:42Z,direct_url=,disk_format='vmdk',id=d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d9c1bd4c77004c3cb8e42232cad1896c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-04-02T10:03:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 570.078655] env[61649]: DEBUG nova.virt.hardware [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Flavor limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 570.078655] env[61649]: DEBUG nova.virt.hardware [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Image limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 570.078894] env[61649]: DEBUG nova.virt.hardware [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Flavor pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 570.078894] env[61649]: DEBUG nova.virt.hardware [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Image pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 570.078894] env[61649]: DEBUG nova.virt.hardware [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 570.078894] env[61649]: DEBUG nova.virt.hardware [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 570.078894] env[61649]: DEBUG nova.virt.hardware [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 570.079125] env[61649]: DEBUG nova.virt.hardware [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Got 1 possible topologies {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 570.079125] env[61649]: DEBUG nova.virt.hardware [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 570.079125] env[61649]: DEBUG nova.virt.hardware [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 570.079894] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3e933a5-3ad2-4721-95f7-63c7a3ffe5a6 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.087932] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2291fa01-be89-4fbb-b2a5-ab02e8bd75d2 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.093571] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4de844a6ab0349acb768ade23709784d [ 570.117322] env[61649]: DEBUG oslo_concurrency.lockutils [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 570.117569] env[61649]: DEBUG oslo_concurrency.lockutils [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" 
:: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 570.119050] env[61649]: INFO nova.compute.claims [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 570.120758] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Expecting reply to msg fb72de875fbe477e86dfb30082921b5e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 570.166880] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fb72de875fbe477e86dfb30082921b5e [ 570.168641] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Expecting reply to msg 03e2625a8c11419bbe63e1e59cecea69 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 570.185536] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 03e2625a8c11419bbe63e1e59cecea69 [ 570.256472] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-992fbdac-ff1b-4eec-a65c-ecf8ca87b2f4 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.263669] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e634e49f-2a36-438b-ae11-076a7edda530 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.296201] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2847be4-8555-4035-a034-e96fef4ec348 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.306752] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84306d62-5e24-4615-987a-9ecb8fe644a8 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.327777] env[61649]: DEBUG nova.compute.provider_tree [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 570.327990] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Expecting reply to msg efd3a2e71f664e3788827a15def7225c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 570.335809] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg efd3a2e71f664e3788827a15def7225c [ 570.336883] env[61649]: DEBUG nova.scheduler.client.report [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on 
inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 570.339469] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Expecting reply to msg 9181b92d9d89414d912df2ac47027f74 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 570.351852] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9181b92d9d89414d912df2ac47027f74 [ 570.352691] env[61649]: DEBUG oslo_concurrency.lockutils [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.235s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 570.353179] env[61649]: DEBUG nova.compute.manager [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] Start building networks asynchronously for instance. {{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 570.354827] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Expecting reply to msg 6c77cfd7e06e4eaa8e4d200b5cdf0895 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 570.398592] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6c77cfd7e06e4eaa8e4d200b5cdf0895 [ 570.399729] env[61649]: DEBUG nova.compute.utils [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Using /dev/sd instead of None {{(pid=61649) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 570.400371] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Expecting reply to msg ad8e70166ecf4af2bc26700e1469db09 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 570.401637] env[61649]: DEBUG nova.compute.manager [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] Allocating IP information in the background. 
[ 570.401637] env[61649]: DEBUG nova.network.neutron [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] allocate_for_instance() {{(pid=61649) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 570.411477] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ad8e70166ecf4af2bc26700e1469db09 [ 570.412101] env[61649]: DEBUG nova.compute.manager [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] Start building block device mappings for instance. {{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 570.413905] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Expecting reply to msg 71c9402ae6514867861ed6166be52ec1 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 570.448467] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158088, 'name': CreateVM_Task, 'duration_secs': 0.498041} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 570.448646] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] Created VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 570.451464] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 71c9402ae6514867861ed6166be52ec1 [ 570.454580] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Expecting reply to msg 9a167cdf67a840ca9f1424c23336543d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 570.490044] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9a167cdf67a840ca9f1424c23336543d [ 570.491266] env[61649]: DEBUG nova.compute.manager [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] Start spawning the instance on the hypervisor.
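{{(pid=61649) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}}

The "Policy check for network:attach_external_network failed" entry that follows is oslo.policy denying a rule to a caller holding only the reader and member roles. A minimal standalone sketch of such a check, with an illustrative rule default rather than Nova's actual registered policy set:

```python
# Sketch: an oslo.policy check like the one the log reports as failed.
# The role:admin default below is illustrative; Nova registers its own.
from oslo_config import cfg
from oslo_policy import policy

enforcer = policy.Enforcer(cfg.CONF)
enforcer.register_default(policy.RuleDefault(
    "network:attach_external_network", "role:admin"))

creds = {"roles": ["reader", "member"],
         "project_id": "35934b0d6fb84745955d3aad4935dd83"}
# enforce() returns False here: neither role satisfies role:admin,
# which is what nova.policy logs as a failed check.
print(enforcer.enforce("network:attach_external_network", {}, creds))
```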
[ 570.499454] env[61649]: DEBUG nova.policy [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2fb2524a0c8840f58983c6c41fccd11e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '35934b0d6fb84745955d3aad4935dd83', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61649) authorize /opt/stack/nova/nova/policy.py:203}} [ 570.504387] env[61649]: DEBUG oslo_concurrency.lockutils [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 570.504685] env[61649]: DEBUG oslo_concurrency.lockutils [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 570.505104] env[61649]: DEBUG oslo_concurrency.lockutils [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 570.505407] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45f061ce-177a-444c-b56b-64bd747c879f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.511351] env[61649]: DEBUG oslo_vmware.api [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Waiting for the task: (returnval){ [ 570.511351] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]522299c9-afa4-9ee0-a747-7750411ea6db" [ 570.511351] env[61649]: _type = "Task" [ 570.511351] env[61649]: } to complete.
{{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 570.516542] env[61649]: DEBUG nova.virt.hardware [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-04-02T10:03:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-04-02T10:03:42Z,direct_url=,disk_format='vmdk',id=d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d9c1bd4c77004c3cb8e42232cad1896c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-04-02T10:03:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 570.516909] env[61649]: DEBUG nova.virt.hardware [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Flavor limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 570.516909] env[61649]: DEBUG nova.virt.hardware [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Image limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 570.517121] env[61649]: DEBUG nova.virt.hardware [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Flavor pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 570.517314] env[61649]: DEBUG nova.virt.hardware [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Image pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 570.517418] env[61649]: DEBUG nova.virt.hardware [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 570.517622] env[61649]: DEBUG nova.virt.hardware [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 570.517779] env[61649]: DEBUG nova.virt.hardware [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 570.517941] env[61649]: DEBUG 
nova.virt.hardware [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Got 1 possible topologies {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 570.518112] env[61649]: DEBUG nova.virt.hardware [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 570.518277] env[61649]: DEBUG nova.virt.hardware [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 570.519331] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aab533fd-0ea7-47b3-b953-9a4430f66b2d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.532083] env[61649]: DEBUG oslo_concurrency.lockutils [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 570.532509] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] Processing image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 570.532757] env[61649]: DEBUG oslo_concurrency.lockutils [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 570.536251] env[61649]: DEBUG nova.policy [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0e2476b5af394c34850545a4b52ec5da', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f1c53c3cb5314909973589bdec54d1b2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61649) authorize /opt/stack/nova/nova/policy.py:203}} [ 570.540277] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62741fcf-bb46-4cf9-9219-a8343b8935d4 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.510769] 
env[61649]: DEBUG oslo_concurrency.lockutils [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Acquiring lock "aacbebf5-bd31-465b-b574-6c4a98b27f30" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 571.511039] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Lock "aacbebf5-bd31-465b-b574-6c4a98b27f30" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 571.511444] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Expecting reply to msg 7920ead8ebe64eb985978df43cf89ddb in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 571.523500] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7920ead8ebe64eb985978df43cf89ddb [ 571.524018] env[61649]: DEBUG nova.compute.manager [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 571.525875] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Expecting reply to msg 998457796ad840cf9d4db2d32f540400 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 571.560246] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 998457796ad840cf9d4db2d32f540400 [ 571.577035] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 571.577330] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 571.578785] env[61649]: INFO nova.compute.claims [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 571.580681] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None 
req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Expecting reply to msg 2d502d74590e46938fc7a562d2c01b29 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 571.619157] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2d502d74590e46938fc7a562d2c01b29 [ 571.621209] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Expecting reply to msg 63a6c31b151a441ebfa503e3a89112d6 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 571.631635] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 63a6c31b151a441ebfa503e3a89112d6 [ 571.733998] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f5a9ce7-7700-45c2-b43f-6ad4f418c64a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.742373] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70fd5ebc-738d-4754-b90a-1a85994b3fef {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.780434] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ba6080c-f36e-40c1-ab80-1e668318213d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.789005] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee09329b-3b05-464d-a8e4-4b45eeecc63d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.804812] env[61649]: DEBUG nova.compute.provider_tree [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 571.805912] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Expecting reply to msg 514a6cb6f543440ea62ee618e03d0f03 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 571.815310] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 514a6cb6f543440ea62ee618e03d0f03 [ 571.816376] env[61649]: DEBUG nova.scheduler.client.report [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:940}} [ 571.819283] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Expecting reply to msg b07cf26694bc43c1bc6d834811143e4d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 571.831883] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b07cf26694bc43c1bc6d834811143e4d [ 571.832703] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.255s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 571.833190] env[61649]: DEBUG nova.compute.manager [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Start building networks asynchronously for instance. {{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 571.835234] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Expecting reply to msg be7a299e966a463eb75af4586e54be9b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 571.868304] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg be7a299e966a463eb75af4586e54be9b [ 571.870683] env[61649]: DEBUG nova.compute.utils [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Using /dev/sd instead of None {{(pid=61649) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 571.871312] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Expecting reply to msg fab3d202841a4c9c9875b2b2c8c6310e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 571.874120] env[61649]: DEBUG nova.compute.manager [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Allocating IP information in the background. 
{{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 571.874120] env[61649]: DEBUG nova.network.neutron [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] allocate_for_instance() {{(pid=61649) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 571.884847] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fab3d202841a4c9c9875b2b2c8c6310e [ 571.885519] env[61649]: DEBUG oslo_concurrency.lockutils [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Acquiring lock "7d93caea-4740-4bcd-9f06-9397d37a07b2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 571.885739] env[61649]: DEBUG oslo_concurrency.lockutils [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Lock "7d93caea-4740-4bcd-9f06-9397d37a07b2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 571.886187] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Expecting reply to msg 3d5aa1ddd50546c6b218c9d27568b28a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 571.887074] env[61649]: DEBUG nova.compute.manager [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Start building block device mappings for instance. {{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 571.888652] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Expecting reply to msg 7913d8e3aa7e46a28451bcec3dc8da0b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 571.896077] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3d5aa1ddd50546c6b218c9d27568b28a [ 571.896444] env[61649]: DEBUG nova.compute.manager [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] Starting instance... 
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 571.898045] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Expecting reply to msg 93f73cc939c2494294ef0eed46047224 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 571.928551] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7913d8e3aa7e46a28451bcec3dc8da0b [ 571.930870] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Expecting reply to msg ddfd75249ca74a29a4811effd646dd6f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 571.931992] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 93f73cc939c2494294ef0eed46047224 [ 571.948079] env[61649]: DEBUG oslo_concurrency.lockutils [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 571.948337] env[61649]: DEBUG oslo_concurrency.lockutils [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 571.949817] env[61649]: INFO nova.compute.claims [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 571.951413] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Expecting reply to msg 559ecb19504144ef85b4bba804061b52 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 571.971202] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ddfd75249ca74a29a4811effd646dd6f [ 571.972671] env[61649]: DEBUG nova.compute.manager [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Start spawning the instance on the hypervisor. 
{{(pid=61649) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 571.988024] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 559ecb19504144ef85b4bba804061b52 [ 571.988024] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Expecting reply to msg c7c6c77e5ee344bdb12a3e7705a57f3a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 571.997669] env[61649]: DEBUG nova.virt.hardware [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-04-02T10:03:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-04-02T10:03:42Z,direct_url=,disk_format='vmdk',id=d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d9c1bd4c77004c3cb8e42232cad1896c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-04-02T10:03:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 571.997956] env[61649]: DEBUG nova.virt.hardware [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Flavor limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 571.998158] env[61649]: DEBUG nova.virt.hardware [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Image limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 571.998384] env[61649]: DEBUG nova.virt.hardware [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Flavor pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 571.998586] env[61649]: DEBUG nova.virt.hardware [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Image pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 571.998769] env[61649]: DEBUG nova.virt.hardware [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 571.999031] env[61649]: DEBUG nova.virt.hardware [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 571.999420] env[61649]: DEBUG nova.virt.hardware [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 571.999420] env[61649]: DEBUG nova.virt.hardware [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Got 1 possible topologies {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 571.999632] env[61649]: DEBUG nova.virt.hardware [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 572.000064] env[61649]: DEBUG nova.virt.hardware [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 572.000909] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c7c6c77e5ee344bdb12a3e7705a57f3a [ 572.002288] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d359b912-9f6f-4380-81a4-c77903d07f6c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.017255] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c446eb6d-59f1-496b-bea9-46a02e5cb690 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.075671] env[61649]: DEBUG nova.network.neutron [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] Successfully created port: 09c7ce06-9c39-4a22-a3bb-100dd69c2a32 {{(pid=61649) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 572.103113] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4032e37-666b-46b8-9f27-89b5f56992ed {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.114266] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baf183fd-e11f-4412-b779-c9d47ad9201f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.151858] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7e566b7-909b-4511-a87d-bd27952823aa {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.160852] env[61649]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e641331-4f27-442b-961e-8c8c25c7b6f7 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.166845] env[61649]: DEBUG nova.policy [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a2f7368f8bb044ab9f08db792ee05338', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '33ec6b4a6e4d4d828bd5faa92126c92e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61649) authorize /opt/stack/nova/nova/policy.py:203}} [ 572.180368] env[61649]: DEBUG nova.compute.provider_tree [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 572.180968] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Expecting reply to msg c7651939198040759ad168fb7792dbb1 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 572.189168] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c7651939198040759ad168fb7792dbb1 [ 572.190475] env[61649]: DEBUG nova.scheduler.client.report [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 572.195796] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Expecting reply to msg 184b4cf0bd6a48328adfe758a0a014af in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 572.207266] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 184b4cf0bd6a48328adfe758a0a014af [ 572.208070] env[61649]: DEBUG oslo_concurrency.lockutils [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.260s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 572.208556] env[61649]: DEBUG nova.compute.manager [None 
req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] Start building networks asynchronously for instance. {{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 572.210230] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Expecting reply to msg 1f3584fcfcbf4a3380b6761e66a8b2b1 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 572.245358] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1f3584fcfcbf4a3380b6761e66a8b2b1 [ 572.246612] env[61649]: DEBUG nova.compute.utils [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Using /dev/sd instead of None {{(pid=61649) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 572.247248] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Expecting reply to msg 69e33e6fb27c44c797a64af98c2b2f84 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 572.250286] env[61649]: DEBUG nova.compute.manager [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] Not allocating networking since 'none' was specified. {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1968}} [ 572.265110] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 69e33e6fb27c44c797a64af98c2b2f84 [ 572.265802] env[61649]: DEBUG nova.compute.manager [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] Start building block device mappings for instance. 
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 572.267558] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Expecting reply to msg 419561576c3c4ab190099b298be96b8a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 572.308402] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 419561576c3c4ab190099b298be96b8a [ 572.311129] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Expecting reply to msg 950251ac6b5542fa9726a6ff1a1d422c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 572.314225] env[61649]: DEBUG nova.compute.manager [req-43ae5f89-f3c9-4c07-9492-263d37d29679 req-51155952-c0c1-4458-bb9b-5022bb0b3c4e service nova] [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] Received event network-vif-plugged-2a56cfa7-02aa-4ce0-ba1d-bc427919fbc2 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 572.314453] env[61649]: DEBUG oslo_concurrency.lockutils [req-43ae5f89-f3c9-4c07-9492-263d37d29679 req-51155952-c0c1-4458-bb9b-5022bb0b3c4e service nova] Acquiring lock "b6e68fe1-4ec8-4f0f-bc6b-168038b1998e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 572.314611] env[61649]: DEBUG oslo_concurrency.lockutils [req-43ae5f89-f3c9-4c07-9492-263d37d29679 req-51155952-c0c1-4458-bb9b-5022bb0b3c4e service nova] Lock "b6e68fe1-4ec8-4f0f-bc6b-168038b1998e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 572.314770] env[61649]: DEBUG oslo_concurrency.lockutils [req-43ae5f89-f3c9-4c07-9492-263d37d29679 req-51155952-c0c1-4458-bb9b-5022bb0b3c4e service nova] Lock "b6e68fe1-4ec8-4f0f-bc6b-168038b1998e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 572.314928] env[61649]: DEBUG nova.compute.manager [req-43ae5f89-f3c9-4c07-9492-263d37d29679 req-51155952-c0c1-4458-bb9b-5022bb0b3c4e service nova] [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] No waiting events found dispatching network-vif-plugged-2a56cfa7-02aa-4ce0-ba1d-bc427919fbc2 {{(pid=61649) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 572.315491] env[61649]: WARNING nova.compute.manager [req-43ae5f89-f3c9-4c07-9492-263d37d29679 req-51155952-c0c1-4458-bb9b-5022bb0b3c4e service nova] [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] Received unexpected event network-vif-plugged-2a56cfa7-02aa-4ce0-ba1d-bc427919fbc2 for instance with vm_state building and task_state spawning. 
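The WARNING above is a benign ordering race: Neutron delivered network-vif-plugged before the compute manager registered a waiter for it, so the event is dispatched with nothing listening. Events like this reach Nova through its os-server-external-events API. A minimal sketch of that call, with the endpoint and token as placeholders (the server and port UUIDs are the ones from the log):

```python
# Sketch: how an external event like the one in the log reaches Nova.
# Neutron POSTs to Nova's os-server-external-events API; NOVA and
# TOKEN are placeholders.
import requests

NOVA = "http://nova/v2.1"  # hypothetical endpoint
TOKEN = "..."

payload = {
    "events": [{
        "server_uuid": "b6e68fe1-4ec8-4f0f-bc6b-168038b1998e",
        "name": "network-vif-plugged",
        "tag": "2a56cfa7-02aa-4ce0-ba1d-bc427919fbc2",  # the port ID
        "status": "completed",
    }]
}
resp = requests.post(f"{NOVA}/os-server-external-events",
                     json=payload, headers={"X-Auth-Token": TOKEN})
resp.raise_for_status()
```

Nova reports a per-event status in the response (a 207 multi-status when they differ); the "Received unexpected event" warning is what the no-waiter case looks like on the compute side.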
[ 572.344280] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 950251ac6b5542fa9726a6ff1a1d422c [ 572.345891] env[61649]: DEBUG nova.compute.manager [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] Start spawning the instance on the hypervisor. {{(pid=61649) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 572.372354] env[61649]: DEBUG nova.virt.hardware [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-04-02T10:03:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-04-02T10:03:42Z,direct_url=,disk_format='vmdk',id=d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d9c1bd4c77004c3cb8e42232cad1896c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-04-02T10:03:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 572.372608] env[61649]: DEBUG nova.virt.hardware [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Flavor limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 572.372765] env[61649]: DEBUG nova.virt.hardware [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Image limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 572.372997] env[61649]: DEBUG nova.virt.hardware [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Flavor pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 572.373189] env[61649]: DEBUG nova.virt.hardware [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Image pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 572.374009] env[61649]: DEBUG nova.virt.hardware [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 572.374347] env[61649]: DEBUG nova.virt.hardware [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) 
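{{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}

The hardware.py entries around this point log the guest CPU topology search: flavor and image leave all limits at 0:0:0, so the maxima default to 65536, and for a 1-vCPU flavor the enumeration that follows yields the single candidate 1 socket, 1 core, 1 thread. A simplified standalone version of that enumeration, not Nova's actual implementation:

```python
# Sketch: enumerate (sockets, cores, threads) splits for a vCPU count,
# simplified from nova.virt.hardware's topology logic.
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    """Yield every (sockets, cores, threads) whose product == vcpus."""
    for s in range(1, min(max_sockets, vcpus) + 1):
        if vcpus % s:
            continue
        for c in range(1, min(max_cores, vcpus // s) + 1):
            if (vcpus // s) % c:
                continue
            t = vcpus // (s * c)
            if t <= max_threads:
                yield (s, c, t)

print(list(possible_topologies(1)))  # [(1, 1, 1)], as in the log
print(list(possible_topologies(4)))  # six splits, e.g. (1, 2, 2), (2, 2, 1)
```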
[ 572.374527] env[61649]: DEBUG nova.virt.hardware [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 572.374698] env[61649]: DEBUG nova.virt.hardware [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Got 1 possible topologies {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 572.374861] env[61649]: DEBUG nova.virt.hardware [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 572.375031] env[61649]: DEBUG nova.virt.hardware [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 572.375880] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5356f041-3b98-4429-9d42-33c9cccd0e82 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.385487] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d865c308-b6b0-4910-9cf7-a0ffe2a746b2 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.405819] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] Instance VIF info [] {{(pid=61649) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 572.413322] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Creating folder: Project (36e25054ec3d49c598325d3af94c3393). Parent ref: group-v51588. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 572.413908] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d1c16e2e-27b4-4261-a7c8-2756810161bf {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.430798] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Created folder: Project (36e25054ec3d49c598325d3af94c3393) in parent group-v51588. [ 572.430798] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Creating folder: Instances. Parent ref: group-v51595.
{{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 572.430798] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9047f3ea-0d3a-4816-89dc-e0e58493934d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.439324] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Created folder: Instances in parent group-v51595. [ 572.439324] env[61649]: DEBUG oslo.service.loopingcall [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 572.439324] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] Creating VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 572.439494] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ec69dd1a-9104-47f9-9536-27c4b90a8286 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.459454] env[61649]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 572.459454] env[61649]: value = "task-158091" [ 572.459454] env[61649]: _type = "Task" [ 572.459454] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 572.466930] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158091, 'name': CreateVM_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.684533] env[61649]: DEBUG nova.network.neutron [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] [instance: 29281253-e489-48f5-b219-75ae984adb00] Successfully created port: 30aecfaa-83f3-4eef-93c6-436c0beaf134 {{(pid=61649) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 572.968879] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158091, 'name': CreateVM_Task, 'duration_secs': 0.261429} completed successfully. 
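{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}

The task entries above are oslo.vmware's poll loop at work: the driver kicks off CreateVM_Task, and wait_for_task drives an oslo.service looping call (the "Waiting for function ... to return" entry) that re-reads the task until vCenter reports success or error. A minimal sketch of the same pattern; the vCenter host, credentials, and managed object references are placeholders, not values from this deployment.

```python
# Sketch: poll a vSphere task the way the log's wait_for_task does.
# Host/credentials are placeholders for a reachable vCenter; the refs
# and spec would be built elsewhere in a real driver.
from oslo_vmware import api

session = api.VMwareAPISession(
    "vc.example.org", "user", "secret",  # hypothetical vCenter
    api_retry_count=10, task_poll_interval=0.5)

folder_ref = config_spec = respool_ref = None  # placeholders

# invoke_api() issues the SOAP call and returns a Task managed-object
# reference like "task-158091" in the log.
task = session.invoke_api(session.vim, "CreateVM_Task", folder_ref,
                          config=config_spec, pool=respool_ref)

# wait_for_task() re-reads task.info on a fixed interval until the
# state is 'success' (returning the task info) or 'error' (raising).
task_info = session.wait_for_task(task)
print(task_info.result)  # the new VM's managed object reference
```

When the task ends in an error state, wait_for_task raises instead of returning, which the driver surfaces as a build failure for the instance.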
[ 572.968879] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] Created VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 572.968879] env[61649]: DEBUG oslo_concurrency.lockutils [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 572.969017] env[61649]: DEBUG oslo_concurrency.lockutils [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 572.969276] env[61649]: DEBUG oslo_concurrency.lockutils [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 572.969518] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-240692cb-5df8-4854-ac85-37d08294c097 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.974025] env[61649]: DEBUG oslo_vmware.api [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Waiting for the task: (returnval){ [ 572.974025] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52d18dd3-4895-5164-28d4-a654d41fe1fe" [ 572.974025] env[61649]: _type = "Task" [ 572.974025] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 572.981779] env[61649]: DEBUG oslo_vmware.api [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52d18dd3-4895-5164-28d4-a654d41fe1fe, 'name': SearchDatastore_Task} progress is 0%.
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.485147] env[61649]: DEBUG oslo_concurrency.lockutils [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 573.485649] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] Processing image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 573.486037] env[61649]: DEBUG oslo_concurrency.lockutils [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 574.051748] env[61649]: DEBUG nova.network.neutron [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Successfully created port: f6f03f6f-624c-4154-847a-ea4f6674de15 {{(pid=61649) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 574.801502] env[61649]: DEBUG oslo_concurrency.lockutils [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Acquiring lock "eb0c04e3-1234-445c-bfa6-e031dd0b89d3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 574.801726] env[61649]: DEBUG oslo_concurrency.lockutils [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Lock "eb0c04e3-1234-445c-bfa6-e031dd0b89d3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 574.802203] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Expecting reply to msg a3092c629da647e7b45dab3d88592aa7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 574.818188] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a3092c629da647e7b45dab3d88592aa7 [ 574.818709] env[61649]: DEBUG nova.compute.manager [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Starting instance... 
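{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}

The Acquiring/Acquired/Releasing entries above come from oslo.concurrency: each cached image path gets its own named lock so that concurrent builds fetching the same image serialize, and per-instance and per-node state is guarded the same way. A minimal sketch of both forms of that pattern, with the lock names taken from the log and the bodies as placeholders:

```python
# Sketch: the oslo.concurrency pattern behind the lock entries above.
# lockutils.lock() is a context manager; the same name always maps to
# the same lock, so workers touching one cached image serialize here.
from oslo_concurrency import lockutils

cache_key = ("[datastore1] devstack-image-cache_base/"
             "d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11")

with lockutils.lock(cache_key):
    # placeholder: check whether the image is already cached on the
    # datastore and fetch it from Glance if missing
    pass

# The decorator form guards whole methods, as with "compute_resources"
# in the resource tracker's instance_claim entries.
@lockutils.synchronized("compute_resources")
def instance_claim():
    pass  # placeholder for resource-tracker bookkeeping
```

The "Acquired external semaphore" entries correspond to the external=True variant, which adds a file lock so the exclusion also holds across processes, not just across threads of one worker.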
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 574.820433] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Expecting reply to msg 392c4f717b7d406a8e333d53fecd025d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 574.845386] env[61649]: DEBUG nova.network.neutron [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] Successfully updated port: 09c7ce06-9c39-4a22-a3bb-100dd69c2a32 {{(pid=61649) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 574.845865] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Expecting reply to msg 2090b1f8c46b493dabfdae8bad755d58 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 574.854338] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2090b1f8c46b493dabfdae8bad755d58 [ 574.854988] env[61649]: DEBUG oslo_concurrency.lockutils [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Acquiring lock "refresh_cache-2c9b8ba9-193e-468f-bc4e-006ab413b374" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 574.855111] env[61649]: DEBUG oslo_concurrency.lockutils [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Acquired lock "refresh_cache-2c9b8ba9-193e-468f-bc4e-006ab413b374" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 574.855249] env[61649]: DEBUG nova.network.neutron [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] Building network info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 574.855631] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Expecting reply to msg b50f8489ee7946468821a61bfbec770b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 574.873901] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 392c4f717b7d406a8e333d53fecd025d [ 574.884835] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b50f8489ee7946468821a61bfbec770b [ 574.890149] env[61649]: DEBUG oslo_concurrency.lockutils [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 574.890393] env[61649]: DEBUG oslo_concurrency.lockutils [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 
tempest-ServerExternalEventsTest-216908951-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 574.891833] env[61649]: INFO nova.compute.claims [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 574.893381] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Expecting reply to msg bbab3b7f03184209aab524ecf177e107 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 574.930594] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bbab3b7f03184209aab524ecf177e107 [ 574.932417] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Expecting reply to msg 164cbd0ec718465b86f7696626609e61 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 574.942689] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 164cbd0ec718465b86f7696626609e61 [ 575.027515] env[61649]: DEBUG nova.network.neutron [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] Instance cache missing network info. 
{{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 575.081517] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-821f1a7b-e0cd-445b-b253-72523a0f6469 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.089763] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b42805ad-8ca3-406c-a043-9e6bebca1488 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.121195] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-530298eb-62cf-4cf9-a3c2-891cd268f9f6 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.130937] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86be7b1c-871c-4774-97a1-577faf2aaeb1 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.146092] env[61649]: DEBUG nova.compute.provider_tree [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 575.146725] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Expecting reply to msg 8971f8f3956f4662b6ce17b46a0d8793 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 575.156725] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8971f8f3956f4662b6ce17b46a0d8793 [ 575.157754] env[61649]: DEBUG nova.scheduler.client.report [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 575.160090] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Expecting reply to msg a9bd6e85d46a4f0eaa48fafacfc17af5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 575.170373] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a9bd6e85d46a4f0eaa48fafacfc17af5 [ 575.173349] env[61649]: DEBUG oslo_concurrency.lockutils [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.281s 
{{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 575.173349] env[61649]: DEBUG nova.compute.manager [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Start building networks asynchronously for instance. {{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 575.173700] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Expecting reply to msg cfd042bb8b644d21a02c297202ce2922 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 575.212164] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cfd042bb8b644d21a02c297202ce2922 [ 575.212164] env[61649]: DEBUG nova.compute.utils [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Using /dev/sd instead of None {{(pid=61649) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 575.212164] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Expecting reply to msg e4307b6645b34305939cdf85fde5eb29 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 575.212164] env[61649]: DEBUG nova.compute.manager [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Allocating IP information in the background. {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 575.212164] env[61649]: DEBUG nova.network.neutron [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] allocate_for_instance() {{(pid=61649) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 575.231693] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e4307b6645b34305939cdf85fde5eb29 [ 575.231693] env[61649]: DEBUG nova.compute.manager [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Start building block device mappings for instance. 
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 575.233370] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Expecting reply to msg dbe31b2e3af7455a9766d855a5174129 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 575.274023] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dbe31b2e3af7455a9766d855a5174129 [ 575.276646] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Expecting reply to msg e03fd0eadd524438a8496f802f684968 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 575.312771] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e03fd0eadd524438a8496f802f684968 [ 575.313919] env[61649]: DEBUG nova.compute.manager [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Start spawning the instance on the hypervisor. {{(pid=61649) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 575.337389] env[61649]: DEBUG nova.virt.hardware [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-04-02T10:03:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-04-02T10:03:42Z,direct_url=,disk_format='vmdk',id=d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d9c1bd4c77004c3cb8e42232cad1896c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-04-02T10:03:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 575.337389] env[61649]: DEBUG nova.virt.hardware [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Flavor limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 575.337389] env[61649]: DEBUG nova.virt.hardware [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Image limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 575.337607] env[61649]: DEBUG nova.virt.hardware [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Flavor pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 575.339882] env[61649]: DEBUG nova.virt.hardware [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 
tempest-ServerExternalEventsTest-216908951-project-member] Image pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 575.340122] env[61649]: DEBUG nova.virt.hardware [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 575.340357] env[61649]: DEBUG nova.virt.hardware [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 575.340531] env[61649]: DEBUG nova.virt.hardware [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 575.340682] env[61649]: DEBUG nova.virt.hardware [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Got 1 possible topologies {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 575.340828] env[61649]: DEBUG nova.virt.hardware [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 575.340995] env[61649]: DEBUG nova.virt.hardware [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 575.342672] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98d312ea-d5ec-400e-b42a-5fee65fd7986 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.352069] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da32d336-8595-4cd2-9ad2-afd47896070c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.575359] env[61649]: DEBUG nova.policy [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9adfb40d53d74004a33c395f48e136cf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '52e958256a8947ae85fbbb0c13ef4220', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61649) 
authorize /opt/stack/nova/nova/policy.py:203}} [ 575.831857] env[61649]: DEBUG nova.network.neutron [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] [instance: 29281253-e489-48f5-b219-75ae984adb00] Successfully updated port: 30aecfaa-83f3-4eef-93c6-436c0beaf134 {{(pid=61649) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 575.832394] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Expecting reply to msg 71017550ed204b4bb7308ed10e0537e7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 575.841480] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 71017550ed204b4bb7308ed10e0537e7 [ 575.841480] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Acquiring lock "refresh_cache-29281253-e489-48f5-b219-75ae984adb00" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 575.841480] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Acquired lock "refresh_cache-29281253-e489-48f5-b219-75ae984adb00" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 575.841480] env[61649]: DEBUG nova.network.neutron [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] [instance: 29281253-e489-48f5-b219-75ae984adb00] Building network info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 575.841480] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Expecting reply to msg bd2e4f0e370141c78fafe1b3b5ee6148 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 575.850474] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bd2e4f0e370141c78fafe1b3b5ee6148 [ 575.955000] env[61649]: DEBUG nova.network.neutron [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] Updating instance_info_cache with network_info: [{"id": "09c7ce06-9c39-4a22-a3bb-100dd69c2a32", "address": "fa:16:3e:29:c8:fa", "network": {"id": "84fe0cc8-55f3-4b3f-99d7-f93bd7a68e42", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-703690495-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35934b0d6fb84745955d3aad4935dd83", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"a8bbebaf-0fb5-42ae-8d4f-ecd4f46d0244", "external-id": "nsx-vlan-transportzone-296", "segmentation_id": 296, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09c7ce06-9c", "ovs_interfaceid": "09c7ce06-9c39-4a22-a3bb-100dd69c2a32", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 575.955529] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Expecting reply to msg 8b24180dc4384d24bac550ae51fde3bf in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 575.958466] env[61649]: DEBUG nova.network.neutron [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] [instance: 29281253-e489-48f5-b219-75ae984adb00] Instance cache missing network info. {{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 575.962542] env[61649]: DEBUG nova.compute.manager [req-b6a14c9c-a8f7-4c2b-adcd-7ec306c4a763 req-86eec9eb-5665-49b1-add3-f55853ae05f5 service nova] [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] Received event network-changed-2a56cfa7-02aa-4ce0-ba1d-bc427919fbc2 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 575.962730] env[61649]: DEBUG nova.compute.manager [req-b6a14c9c-a8f7-4c2b-adcd-7ec306c4a763 req-86eec9eb-5665-49b1-add3-f55853ae05f5 service nova] [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] Refreshing instance network info cache due to event network-changed-2a56cfa7-02aa-4ce0-ba1d-bc427919fbc2. 
{{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 575.962944] env[61649]: DEBUG oslo_concurrency.lockutils [req-b6a14c9c-a8f7-4c2b-adcd-7ec306c4a763 req-86eec9eb-5665-49b1-add3-f55853ae05f5 service nova] Acquiring lock "refresh_cache-b6e68fe1-4ec8-4f0f-bc6b-168038b1998e" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 575.963088] env[61649]: DEBUG oslo_concurrency.lockutils [req-b6a14c9c-a8f7-4c2b-adcd-7ec306c4a763 req-86eec9eb-5665-49b1-add3-f55853ae05f5 service nova] Acquired lock "refresh_cache-b6e68fe1-4ec8-4f0f-bc6b-168038b1998e" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 575.963247] env[61649]: DEBUG nova.network.neutron [req-b6a14c9c-a8f7-4c2b-adcd-7ec306c4a763 req-86eec9eb-5665-49b1-add3-f55853ae05f5 service nova] [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] Refreshing network info cache for port 2a56cfa7-02aa-4ce0-ba1d-bc427919fbc2 {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 575.963698] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-b6a14c9c-a8f7-4c2b-adcd-7ec306c4a763 req-86eec9eb-5665-49b1-add3-f55853ae05f5 service nova] Expecting reply to msg 898fd3d424204482b0f2b5e95238a808 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 575.968859] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8b24180dc4384d24bac550ae51fde3bf [ 575.969377] env[61649]: DEBUG oslo_concurrency.lockutils [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Releasing lock "refresh_cache-2c9b8ba9-193e-468f-bc4e-006ab413b374" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 575.969635] env[61649]: DEBUG nova.compute.manager [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] Instance network_info: |[{"id": "09c7ce06-9c39-4a22-a3bb-100dd69c2a32", "address": "fa:16:3e:29:c8:fa", "network": {"id": "84fe0cc8-55f3-4b3f-99d7-f93bd7a68e42", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-703690495-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35934b0d6fb84745955d3aad4935dd83", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8bbebaf-0fb5-42ae-8d4f-ecd4f46d0244", "external-id": "nsx-vlan-transportzone-296", "segmentation_id": 296, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09c7ce06-9c", "ovs_interfaceid": "09c7ce06-9c39-4a22-a3bb-100dd69c2a32", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 575.970026] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f 
tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:29:c8:fa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a8bbebaf-0fb5-42ae-8d4f-ecd4f46d0244', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '09c7ce06-9c39-4a22-a3bb-100dd69c2a32', 'vif_model': 'vmxnet3'}] {{(pid=61649) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 575.977301] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Creating folder: Project (35934b0d6fb84745955d3aad4935dd83). Parent ref: group-v51588. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 575.977837] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 898fd3d424204482b0f2b5e95238a808 [ 575.978415] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3a217cfb-1374-424c-884e-7ce0ec79ff36 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.996051] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Created folder: Project (35934b0d6fb84745955d3aad4935dd83) in parent group-v51588. [ 575.996051] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Creating folder: Instances. Parent ref: group-v51598. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 575.996051] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0f4749ae-337f-4470-9a81-c4263ba8d931 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.005754] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Created folder: Instances in parent group-v51598. [ 576.005967] env[61649]: DEBUG oslo.service.loopingcall [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 576.009342] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] Creating VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 576.009342] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c131b13d-0681-4b73-9473-5c4b5df6994e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.042436] env[61649]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 576.042436] env[61649]: value = "task-158094" [ 576.042436] env[61649]: _type = "Task" [ 576.042436] env[61649]: } to complete. 
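
The Folder.CreateFolder invocations above build the per-project folder and its Instances child under the group-v51588 root before any CreateVM_Task runs. A hedged sketch of the underlying call as it would look through oslo.vmware, with the parent ref supplied by the caller; a common pattern, assumed here, is to treat a DuplicateName fault as "already created":

    from oslo_vmware import exceptions as vexc

    def ensure_folder(session, parent_ref, name):
        # CreateFolder raises DuplicateName if a child folder with this
        # name already exists; callers can swallow that and look the
        # existing folder up instead of failing the spawn.
        try:
            return session.invoke_api(session.vim, 'CreateFolder',
                                      parent_ref, name=name)
        except vexc.DuplicateName:
            return None  # caller re-reads childEntity to find the folder
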
{{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 576.052359] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158094, 'name': CreateVM_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.147614] env[61649]: DEBUG nova.compute.manager [req-d6f5c793-5b7e-4062-bbe2-9b272037e5e4 req-87ceebca-6acd-4b19-a396-965d522f50d3 service nova] [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] Received event network-vif-plugged-09c7ce06-9c39-4a22-a3bb-100dd69c2a32 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 576.147887] env[61649]: DEBUG oslo_concurrency.lockutils [req-d6f5c793-5b7e-4062-bbe2-9b272037e5e4 req-87ceebca-6acd-4b19-a396-965d522f50d3 service nova] Acquiring lock "2c9b8ba9-193e-468f-bc4e-006ab413b374-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 576.148210] env[61649]: DEBUG oslo_concurrency.lockutils [req-d6f5c793-5b7e-4062-bbe2-9b272037e5e4 req-87ceebca-6acd-4b19-a396-965d522f50d3 service nova] Lock "2c9b8ba9-193e-468f-bc4e-006ab413b374-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 576.148290] env[61649]: DEBUG oslo_concurrency.lockutils [req-d6f5c793-5b7e-4062-bbe2-9b272037e5e4 req-87ceebca-6acd-4b19-a396-965d522f50d3 service nova] Lock "2c9b8ba9-193e-468f-bc4e-006ab413b374-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 576.148408] env[61649]: DEBUG nova.compute.manager [req-d6f5c793-5b7e-4062-bbe2-9b272037e5e4 req-87ceebca-6acd-4b19-a396-965d522f50d3 service nova] [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] No waiting events found dispatching network-vif-plugged-09c7ce06-9c39-4a22-a3bb-100dd69c2a32 {{(pid=61649) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 576.148567] env[61649]: WARNING nova.compute.manager [req-d6f5c793-5b7e-4062-bbe2-9b272037e5e4 req-87ceebca-6acd-4b19-a396-965d522f50d3 service nova] [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] Received unexpected event network-vif-plugged-09c7ce06-9c39-4a22-a3bb-100dd69c2a32 for instance with vm_state building and task_state spawning. [ 576.552913] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158094, 'name': CreateVM_Task, 'duration_secs': 0.327976} completed successfully. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 576.553092] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] Created VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 576.553805] env[61649]: DEBUG oslo_concurrency.lockutils [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 576.553963] env[61649]: DEBUG oslo_concurrency.lockutils [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 576.554372] env[61649]: DEBUG oslo_concurrency.lockutils [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 576.554621] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-acdfc008-1de6-458b-b887-6ffbb12ca9b7 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.559783] env[61649]: DEBUG oslo_vmware.api [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Waiting for the task: (returnval){ [ 576.559783] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52ed3a87-293f-d9da-356c-2970059ed790" [ 576.559783] env[61649]: _type = "Task" [ 576.559783] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 576.570717] env[61649]: DEBUG oslo_vmware.api [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52ed3a87-293f-d9da-356c-2970059ed790, 'name': SearchDatastore_Task} progress is 0%. 
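
Each spawn above serializes access to the shared image cache by taking the same "[datastore1] devstack-image-cache_base/<image-id>" lock before probing the datastore, and the "Acquired external semaphore" lines are oslo.concurrency's cross-process variant of that lock. A minimal sketch with an illustrative lock body; the name is shortened here to stay filename-safe, since external locks map the name onto a lock file under lock_path:

    from oslo_concurrency import lockutils

    image_lock = 'devstack-image-cache_base-d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11'

    # external=True adds the file-based inter-process semaphore behind the
    # "Acquired external semaphore" lines; without it the lock only
    # serializes threads inside one process.
    with lockutils.lock(image_lock, external=True, lock_path='/tmp'):
        pass  # probe or populate the cached VMDK while holding the lock
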
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.708360] env[61649]: DEBUG nova.network.neutron [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] [instance: 29281253-e489-48f5-b219-75ae984adb00] Updating instance_info_cache with network_info: [{"id": "30aecfaa-83f3-4eef-93c6-436c0beaf134", "address": "fa:16:3e:d5:17:5b", "network": {"id": "b96f19dc-5248-4d43-8f39-ea3131aaf6db", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.99", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d9c1bd4c77004c3cb8e42232cad1896c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a8c8175-1197-4f12-baac-ef6aba95f585", "external-id": "nsx-vlan-transportzone-832", "segmentation_id": 832, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30aecfaa-83", "ovs_interfaceid": "30aecfaa-83f3-4eef-93c6-436c0beaf134", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 576.709167] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Expecting reply to msg c75636fc2add4928b9463ea31d413c36 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 576.723143] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c75636fc2add4928b9463ea31d413c36 [ 576.723832] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Releasing lock "refresh_cache-29281253-e489-48f5-b219-75ae984adb00" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 576.724199] env[61649]: DEBUG nova.compute.manager [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] [instance: 29281253-e489-48f5-b219-75ae984adb00] Instance network_info: |[{"id": "30aecfaa-83f3-4eef-93c6-436c0beaf134", "address": "fa:16:3e:d5:17:5b", "network": {"id": "b96f19dc-5248-4d43-8f39-ea3131aaf6db", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.99", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d9c1bd4c77004c3cb8e42232cad1896c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a8c8175-1197-4f12-baac-ef6aba95f585", "external-id": "nsx-vlan-transportzone-832", 
"segmentation_id": 832, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30aecfaa-83", "ovs_interfaceid": "30aecfaa-83f3-4eef-93c6-436c0beaf134", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 576.724552] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] [instance: 29281253-e489-48f5-b219-75ae984adb00] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d5:17:5b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1a8c8175-1197-4f12-baac-ef6aba95f585', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '30aecfaa-83f3-4eef-93c6-436c0beaf134', 'vif_model': 'vmxnet3'}] {{(pid=61649) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 576.738273] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Creating folder: Project (f1c53c3cb5314909973589bdec54d1b2). Parent ref: group-v51588. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 576.738919] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-de77f086-dabf-40d4-9e86-8bcc381e3b85 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.751954] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Created folder: Project (f1c53c3cb5314909973589bdec54d1b2) in parent group-v51588. [ 576.752093] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Creating folder: Instances. Parent ref: group-v51601. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 576.752329] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bf85e532-d3a7-40fe-a2e4-556e18598bb6 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.762662] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Created folder: Instances in parent group-v51601. [ 576.762662] env[61649]: DEBUG oslo.service.loopingcall [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 576.762912] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 29281253-e489-48f5-b219-75ae984adb00] Creating VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 576.763308] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7c948aee-f48e-4f1a-95fa-66ff9235b64c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.800757] env[61649]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 576.800757] env[61649]: value = "task-158097" [ 576.800757] env[61649]: _type = "Task" [ 576.800757] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 576.810819] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158097, 'name': CreateVM_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.072241] env[61649]: DEBUG oslo_concurrency.lockutils [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 577.072704] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] Processing image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 577.073071] env[61649]: DEBUG oslo_concurrency.lockutils [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 577.258175] env[61649]: DEBUG nova.network.neutron [req-b6a14c9c-a8f7-4c2b-adcd-7ec306c4a763 req-86eec9eb-5665-49b1-add3-f55853ae05f5 service nova] [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] Updated VIF entry in instance network info cache for port 2a56cfa7-02aa-4ce0-ba1d-bc427919fbc2. 
{{(pid=61649) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 577.258175] env[61649]: DEBUG nova.network.neutron [req-b6a14c9c-a8f7-4c2b-adcd-7ec306c4a763 req-86eec9eb-5665-49b1-add3-f55853ae05f5 service nova] [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] Updating instance_info_cache with network_info: [{"id": "2a56cfa7-02aa-4ce0-ba1d-bc427919fbc2", "address": "fa:16:3e:75:88:be", "network": {"id": "b96f19dc-5248-4d43-8f39-ea3131aaf6db", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.117", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d9c1bd4c77004c3cb8e42232cad1896c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a8c8175-1197-4f12-baac-ef6aba95f585", "external-id": "nsx-vlan-transportzone-832", "segmentation_id": 832, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a56cfa7-02", "ovs_interfaceid": "2a56cfa7-02aa-4ce0-ba1d-bc427919fbc2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 577.258542] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-b6a14c9c-a8f7-4c2b-adcd-7ec306c4a763 req-86eec9eb-5665-49b1-add3-f55853ae05f5 service nova] Expecting reply to msg 1a4fd407e1ba49168fb4c10a4244e732 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 577.268048] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1a4fd407e1ba49168fb4c10a4244e732 [ 577.268494] env[61649]: DEBUG oslo_concurrency.lockutils [req-b6a14c9c-a8f7-4c2b-adcd-7ec306c4a763 req-86eec9eb-5665-49b1-add3-f55853ae05f5 service nova] Releasing lock "refresh_cache-b6e68fe1-4ec8-4f0f-bc6b-168038b1998e" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 577.316542] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158097, 'name': CreateVM_Task, 'duration_secs': 0.351535} completed successfully. 
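
The network_info blobs cached above follow nova's network model: a list of VIFs, each nesting network, then subnets, then ips. Pulling the fixed addresses out of such an entry is plain dict-walking over exactly the structure logged:

    def fixed_ips(network_info):
        # Walk VIF -> network -> subnets -> ips, keeping 'fixed' entries;
        # for the cache entry above this yields ['192.168.233.117'].
        return [ip['address']
                for vif in network_info
                for subnet in vif['network']['subnets']
                for ip in subnet['ips']
                if ip['type'] == 'fixed']
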
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 577.316542] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 29281253-e489-48f5-b219-75ae984adb00] Created VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 577.317235] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 577.317547] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 577.317897] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 577.318608] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5f8e55e-0794-49e8-8aa8-2640616c0993 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.325884] env[61649]: DEBUG oslo_vmware.api [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Waiting for the task: (returnval){ [ 577.325884] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]520ea0fe-2ed2-779f-481b-ede822067903" [ 577.325884] env[61649]: _type = "Task" [ 577.325884] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 577.352027] env[61649]: DEBUG oslo_vmware.api [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]520ea0fe-2ed2-779f-481b-ede822067903, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.368317] env[61649]: DEBUG nova.network.neutron [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Successfully updated port: f6f03f6f-624c-4154-847a-ea4f6674de15 {{(pid=61649) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 577.368317] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Expecting reply to msg 8977e26cbd0b45ca90b698d2db523df5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 577.379693] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8977e26cbd0b45ca90b698d2db523df5 [ 577.379693] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Acquiring lock "refresh_cache-aacbebf5-bd31-465b-b574-6c4a98b27f30" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 577.379693] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Acquired lock "refresh_cache-aacbebf5-bd31-465b-b574-6c4a98b27f30" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 577.379693] env[61649]: DEBUG nova.network.neutron [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Building network info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 577.379693] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Expecting reply to msg fef07d93d2c44a21983af9cd61dca561 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 577.387346] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fef07d93d2c44a21983af9cd61dca561 [ 577.595989] env[61649]: DEBUG nova.network.neutron [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Instance cache missing network info. 
{{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 577.614715] env[61649]: DEBUG nova.network.neutron [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Successfully created port: 09bcc069-4b84-4f9f-8360-b7906d608415 {{(pid=61649) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 577.837673] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 577.837921] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] [instance: 29281253-e489-48f5-b219-75ae984adb00] Processing image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 577.838152] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 578.043489] env[61649]: DEBUG nova.network.neutron [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Updating instance_info_cache with network_info: [{"id": "f6f03f6f-624c-4154-847a-ea4f6674de15", "address": "fa:16:3e:83:12:b8", "network": {"id": "cf322de5-53c5-4644-ade1-8e6c1798faa8", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1492084188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33ec6b4a6e4d4d828bd5faa92126c92e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089a7624-43ba-4fce-bfc0-63e4bb7f9aeb", "external-id": "nsx-vlan-transportzone-218", "segmentation_id": 218, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6f03f6f-62", "ovs_interfaceid": "f6f03f6f-624c-4154-847a-ea4f6674de15", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 578.043996] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Expecting 
reply to msg 0cbf86284b8f49aea774dd88bb98d4f9 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 578.057035] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0cbf86284b8f49aea774dd88bb98d4f9 [ 578.057327] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Releasing lock "refresh_cache-aacbebf5-bd31-465b-b574-6c4a98b27f30" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 578.057615] env[61649]: DEBUG nova.compute.manager [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Instance network_info: |[{"id": "f6f03f6f-624c-4154-847a-ea4f6674de15", "address": "fa:16:3e:83:12:b8", "network": {"id": "cf322de5-53c5-4644-ade1-8e6c1798faa8", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1492084188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33ec6b4a6e4d4d828bd5faa92126c92e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089a7624-43ba-4fce-bfc0-63e4bb7f9aeb", "external-id": "nsx-vlan-transportzone-218", "segmentation_id": 218, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6f03f6f-62", "ovs_interfaceid": "f6f03f6f-624c-4154-847a-ea4f6674de15", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 578.057997] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:83:12:b8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '089a7624-43ba-4fce-bfc0-63e4bb7f9aeb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f6f03f6f-624c-4154-847a-ea4f6674de15', 'vif_model': 'vmxnet3'}] {{(pid=61649) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 578.065805] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Creating folder: Project (33ec6b4a6e4d4d828bd5faa92126c92e). Parent ref: group-v51588. 
{{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 578.066418] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-908159b6-da82-456a-a8ad-9fd8c61c3478 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.082284] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Created folder: Project (33ec6b4a6e4d4d828bd5faa92126c92e) in parent group-v51588. [ 578.082284] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Creating folder: Instances. Parent ref: group-v51604. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 578.082421] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a7589518-f13a-4095-96bb-89e1a6fb1006 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.094176] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Created folder: Instances in parent group-v51604. [ 578.094176] env[61649]: DEBUG oslo.service.loopingcall [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 578.094176] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Creating VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 578.094176] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-42018899-52e3-4dd5-ba59-b68317e21b4c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.118196] env[61649]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 578.118196] env[61649]: value = "task-158100" [ 578.118196] env[61649]: _type = "Task" [ 578.118196] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.130399] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158100, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.423916] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Acquiring lock "fd0ac9db-adc2-46f2-93ff-0b7e299534a7" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 578.424352] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Lock "fd0ac9db-adc2-46f2-93ff-0b7e299534a7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 578.424662] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Expecting reply to msg aebede65f371444388dc86d34874e5b8 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 578.437334] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aebede65f371444388dc86d34874e5b8 [ 578.437879] env[61649]: DEBUG nova.compute.manager [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Starting instance...
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 578.439789] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Expecting reply to msg 9d02f26516054b238a05f3c26731da8c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 578.479878] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9d02f26516054b238a05f3c26731da8c [ 578.502587] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 578.502587] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 578.503442] env[61649]: INFO nova.compute.claims [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 578.505254] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Expecting reply to msg 1ff6a1b2ee1f4c449cb22e6eb03f8df9 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 578.547397] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1ff6a1b2ee1f4c449cb22e6eb03f8df9 [ 578.549241] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Expecting reply to msg 5c0ba82b6f9e480c98030a114d2df9d1 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 578.557347] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5c0ba82b6f9e480c98030a114d2df9d1 [ 578.631179] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158100, 'name': CreateVM_Task, 'duration_secs': 0.341782} completed successfully. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 578.631325] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Created VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 578.634374] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 578.634578] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 578.634913] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 578.635436] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-059bef24-0de0-4716-8a07-dba9822b9baa {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.644748] env[61649]: DEBUG oslo_vmware.api [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Waiting for the task: (returnval){ [ 578.644748] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52c38300-3821-8968-ad80-9d449ba147c6" [ 578.644748] env[61649]: _type = "Task" [ 578.644748] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.658358] env[61649]: DEBUG oslo_vmware.api [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52c38300-3821-8968-ad80-9d449ba147c6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.704872] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91137e34-6681-41ab-b9c6-068a37563e3c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.713405] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e53a53b8-ff0d-4558-a90d-e29be11276ab {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.748181] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24f60cd6-a75b-448b-b889-fac3aa5dac24 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.758556] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6559122-2096-4a68-abb5-0f6af1ddf0cd {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.764890] env[61649]: DEBUG nova.compute.manager [req-f9c3bf9f-dec4-40cb-a12c-3a440450b8c8 req-f1a5470c-cfea-497f-9714-65d10b1779cf service nova] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Received event network-vif-plugged-f6f03f6f-624c-4154-847a-ea4f6674de15 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 578.765117] env[61649]: DEBUG oslo_concurrency.lockutils [req-f9c3bf9f-dec4-40cb-a12c-3a440450b8c8 req-f1a5470c-cfea-497f-9714-65d10b1779cf service nova] Acquiring lock "aacbebf5-bd31-465b-b574-6c4a98b27f30-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 578.765313] env[61649]: DEBUG oslo_concurrency.lockutils [req-f9c3bf9f-dec4-40cb-a12c-3a440450b8c8 req-f1a5470c-cfea-497f-9714-65d10b1779cf service nova] Lock "aacbebf5-bd31-465b-b574-6c4a98b27f30-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 578.765477] env[61649]: DEBUG oslo_concurrency.lockutils [req-f9c3bf9f-dec4-40cb-a12c-3a440450b8c8 req-f1a5470c-cfea-497f-9714-65d10b1779cf service nova] Lock "aacbebf5-bd31-465b-b574-6c4a98b27f30-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 578.765755] env[61649]: DEBUG nova.compute.manager [req-f9c3bf9f-dec4-40cb-a12c-3a440450b8c8 req-f1a5470c-cfea-497f-9714-65d10b1779cf service nova] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] No waiting events found dispatching network-vif-plugged-f6f03f6f-624c-4154-847a-ea4f6674de15 {{(pid=61649) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 578.765828] env[61649]: WARNING nova.compute.manager [req-f9c3bf9f-dec4-40cb-a12c-3a440450b8c8 req-f1a5470c-cfea-497f-9714-65d10b1779cf service nova] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Received unexpected event network-vif-plugged-f6f03f6f-624c-4154-847a-ea4f6674de15 for instance with vm_state building and task_state spawning.
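The Acquiring / acquired / "released" triplets that recur throughout this trace are emitted by the oslo.concurrency lockutils helpers that Nova wraps around critical sections such as build_and_run_instance.<locals>._locked_do_build_and_run_instance above. A minimal sketch of that pattern follows; it assumes only that the oslo.concurrency package is installed, and the lock names and functions in it are illustrative stand-ins, not Nova's actual code:

    import logging

    from oslo_concurrency import lockutils

    # Surface lockutils' DEBUG records, mirroring the trace above.
    logging.basicConfig(level=logging.DEBUG)

    @lockutils.synchronized('compute_resources')
    def claim_resources():
        # Runs only while the in-process 'compute_resources' lock is held;
        # lockutils logs the acquired/released pair around this call.
        pass

    def build_instance(instance_uuid):
        # Context-manager form, analogous to the per-instance UUID locks
        # above; external=False keeps the lock in-process (a threading
        # semaphore rather than a lock file).
        with lockutils.lock(instance_uuid, external=False):
            claim_resources()

    build_instance('fd0ac9db-adc2-46f2-93ff-0b7e299534a7')

The 'Acquired external semaphore "[datastore1] devstack-image-cache_base/…"' lines elsewhere in this trace appear to come from the same helper's external-semaphore path, which coordinates the image cache across processes.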
[ 578.775215] env[61649]: DEBUG nova.compute.provider_tree [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 578.775385] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Expecting reply to msg 41c98da5d16d488a8cdffb066475a1c6 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 578.782717] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 41c98da5d16d488a8cdffb066475a1c6 [ 578.784562] env[61649]: DEBUG nova.scheduler.client.report [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 578.786973] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Expecting reply to msg 2fa0a71c82ee40b99b2486fa928f4479 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 578.799380] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2fa0a71c82ee40b99b2486fa928f4479 [ 578.800204] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.298s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 578.800798] env[61649]: DEBUG nova.compute.manager [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Start building networks asynchronously for instance. 
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 578.803479] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Expecting reply to msg 704e57b4e2504ce5b965356cbd5ff023 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 578.865322] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 704e57b4e2504ce5b965356cbd5ff023 [ 578.866759] env[61649]: DEBUG nova.compute.utils [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Using /dev/sd instead of None {{(pid=61649) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 578.867377] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Expecting reply to msg 4d31600e78b944a690d9eb117249e012 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 578.868216] env[61649]: DEBUG nova.compute.manager [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Allocating IP information in the background. {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 578.868382] env[61649]: DEBUG nova.network.neutron [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] allocate_for_instance() {{(pid=61649) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 578.883699] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4d31600e78b944a690d9eb117249e012 [ 578.884353] env[61649]: DEBUG nova.compute.manager [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Start building block device mappings for instance. 
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 578.886083] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Expecting reply to msg 3eb620744d584084b15cf17eaa323eb6 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 578.924117] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3eb620744d584084b15cf17eaa323eb6 [ 578.925037] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Expecting reply to msg ed8d2ffa80d54d8d9115a2d1c1b9a661 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 578.961135] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ed8d2ffa80d54d8d9115a2d1c1b9a661 [ 578.962339] env[61649]: DEBUG nova.compute.manager [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Start spawning the instance on the hypervisor. {{(pid=61649) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 578.985161] env[61649]: DEBUG nova.virt.hardware [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-04-02T10:03:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-04-02T10:03:42Z,direct_url=,disk_format='vmdk',id=d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d9c1bd4c77004c3cb8e42232cad1896c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-04-02T10:03:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 578.985402] env[61649]: DEBUG nova.virt.hardware [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Flavor limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 578.985563] env[61649]: DEBUG nova.virt.hardware [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Image limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 578.985745] env[61649]: DEBUG nova.virt.hardware [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Flavor pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 578.985893] env[61649]: DEBUG nova.virt.hardware [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 
tempest-ServersTestFqdnHostnames-2010770538-project-member] Image pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 578.986041] env[61649]: DEBUG nova.virt.hardware [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 578.986247] env[61649]: DEBUG nova.virt.hardware [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 578.986404] env[61649]: DEBUG nova.virt.hardware [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 578.986569] env[61649]: DEBUG nova.virt.hardware [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Got 1 possible topologies {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 578.986765] env[61649]: DEBUG nova.virt.hardware [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 578.986941] env[61649]: DEBUG nova.virt.hardware [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 578.988111] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d05a2d88-1108-47ec-b37f-7d59a4dacae1 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.996999] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-642a45bc-eac4-4208-a13d-e161918dfbde {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.033691] env[61649]: DEBUG nova.policy [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '895fac3191624cde945e0b373c5c810d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6758fd3414654235a7cc71acfdefa72d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} 
{{(pid=61649) authorize /opt/stack/nova/nova/policy.py:203}} [ 579.157042] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 579.157389] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Processing image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 579.158054] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 579.185513] env[61649]: DEBUG nova.network.neutron [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Successfully updated port: 09bcc069-4b84-4f9f-8360-b7906d608415 {{(pid=61649) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 579.186300] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Expecting reply to msg 776a1eaa6a3c4535979591f7f7bb1d86 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 579.205603] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 776a1eaa6a3c4535979591f7f7bb1d86 [ 579.205603] env[61649]: DEBUG oslo_concurrency.lockutils [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Acquiring lock "refresh_cache-eb0c04e3-1234-445c-bfa6-e031dd0b89d3" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 579.205603] env[61649]: DEBUG oslo_concurrency.lockutils [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Acquired lock "refresh_cache-eb0c04e3-1234-445c-bfa6-e031dd0b89d3" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 579.205603] env[61649]: DEBUG nova.network.neutron [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Building network info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 579.205603] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] 
Expecting reply to msg eeb9def367ea4616880ea76ec357798d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 579.212831] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eeb9def367ea4616880ea76ec357798d [ 579.528837] env[61649]: DEBUG nova.network.neutron [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Instance cache missing network info. {{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 579.560656] env[61649]: DEBUG nova.network.neutron [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Successfully created port: ffc0c8e2-6ea5-4ef1-be5a-c062c7652b45 {{(pid=61649) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 580.209516] env[61649]: DEBUG nova.network.neutron [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Updating instance_info_cache with network_info: [{"id": "09bcc069-4b84-4f9f-8360-b7906d608415", "address": "fa:16:3e:e9:80:d0", "network": {"id": "b96f19dc-5248-4d43-8f39-ea3131aaf6db", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.60", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d9c1bd4c77004c3cb8e42232cad1896c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a8c8175-1197-4f12-baac-ef6aba95f585", "external-id": "nsx-vlan-transportzone-832", "segmentation_id": 832, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09bcc069-4b", "ovs_interfaceid": "09bcc069-4b84-4f9f-8360-b7906d608415", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 580.209516] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Expecting reply to msg 292bc9e2d47c4a10903f09bec8237485 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 580.228176] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 292bc9e2d47c4a10903f09bec8237485 [ 580.228856] env[61649]: DEBUG oslo_concurrency.lockutils [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Releasing lock "refresh_cache-eb0c04e3-1234-445c-bfa6-e031dd0b89d3" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 580.229202] env[61649]: DEBUG nova.compute.manager [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 
tempest-ServerExternalEventsTest-216908951-project-member] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Instance network_info: |[{"id": "09bcc069-4b84-4f9f-8360-b7906d608415", "address": "fa:16:3e:e9:80:d0", "network": {"id": "b96f19dc-5248-4d43-8f39-ea3131aaf6db", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.60", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d9c1bd4c77004c3cb8e42232cad1896c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a8c8175-1197-4f12-baac-ef6aba95f585", "external-id": "nsx-vlan-transportzone-832", "segmentation_id": 832, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09bcc069-4b", "ovs_interfaceid": "09bcc069-4b84-4f9f-8360-b7906d608415", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 580.229584] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e9:80:d0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1a8c8175-1197-4f12-baac-ef6aba95f585', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '09bcc069-4b84-4f9f-8360-b7906d608415', 'vif_model': 'vmxnet3'}] {{(pid=61649) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 580.248679] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Creating folder: Project (52e958256a8947ae85fbbb0c13ef4220). Parent ref: group-v51588. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 580.249903] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d66141fe-8668-4313-ae1b-e3a4ceae8720 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.265603] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Created folder: Project (52e958256a8947ae85fbbb0c13ef4220) in parent group-v51588. [ 580.266326] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Creating folder: Instances. Parent ref: group-v51607. 
{{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 580.266326] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-af498d21-1f9b-4be9-ba8b-0fe928ced530 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.274981] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Created folder: Instances in parent group-v51607. [ 580.275232] env[61649]: DEBUG oslo.service.loopingcall [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 580.275411] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Creating VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 580.275603] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-437c1796-4973-45c7-bca0-7198ed1fae55 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.297803] env[61649]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 580.297803] env[61649]: value = "task-158103" [ 580.297803] env[61649]: _type = "Task" [ 580.297803] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 580.303711] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158103, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.491153] env[61649]: DEBUG nova.compute.manager [req-7b6cca55-f40a-4999-ac56-45d2565d1df0 req-28bf4c1a-e073-43d9-9f68-4d372d48b816 service nova] [instance: 29281253-e489-48f5-b219-75ae984adb00] Received event network-vif-plugged-30aecfaa-83f3-4eef-93c6-436c0beaf134 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 580.491276] env[61649]: DEBUG oslo_concurrency.lockutils [req-7b6cca55-f40a-4999-ac56-45d2565d1df0 req-28bf4c1a-e073-43d9-9f68-4d372d48b816 service nova] Acquiring lock "29281253-e489-48f5-b219-75ae984adb00-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 580.491517] env[61649]: DEBUG oslo_concurrency.lockutils [req-7b6cca55-f40a-4999-ac56-45d2565d1df0 req-28bf4c1a-e073-43d9-9f68-4d372d48b816 service nova] Lock "29281253-e489-48f5-b219-75ae984adb00-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 580.491680] env[61649]: DEBUG oslo_concurrency.lockutils [req-7b6cca55-f40a-4999-ac56-45d2565d1df0 req-28bf4c1a-e073-43d9-9f68-4d372d48b816 service nova] Lock "29281253-e489-48f5-b219-75ae984adb00-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 580.491849] env[61649]: DEBUG nova.compute.manager [req-7b6cca55-f40a-4999-ac56-45d2565d1df0 req-28bf4c1a-e073-43d9-9f68-4d372d48b816 service nova] [instance: 29281253-e489-48f5-b219-75ae984adb00] No waiting events found dispatching network-vif-plugged-30aecfaa-83f3-4eef-93c6-436c0beaf134 {{(pid=61649) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 580.492025] env[61649]: WARNING nova.compute.manager [req-7b6cca55-f40a-4999-ac56-45d2565d1df0 req-28bf4c1a-e073-43d9-9f68-4d372d48b816 service nova] [instance: 29281253-e489-48f5-b219-75ae984adb00] Received unexpected event network-vif-plugged-30aecfaa-83f3-4eef-93c6-436c0beaf134 for instance with vm_state building and task_state spawning. [ 580.492206] env[61649]: DEBUG nova.compute.manager [req-7b6cca55-f40a-4999-ac56-45d2565d1df0 req-28bf4c1a-e073-43d9-9f68-4d372d48b816 service nova] [instance: 29281253-e489-48f5-b219-75ae984adb00] Received event network-changed-30aecfaa-83f3-4eef-93c6-436c0beaf134 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 580.492359] env[61649]: DEBUG nova.compute.manager [req-7b6cca55-f40a-4999-ac56-45d2565d1df0 req-28bf4c1a-e073-43d9-9f68-4d372d48b816 service nova] [instance: 29281253-e489-48f5-b219-75ae984adb00] Refreshing instance network info cache due to event network-changed-30aecfaa-83f3-4eef-93c6-436c0beaf134.
{{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 580.492537] env[61649]: DEBUG oslo_concurrency.lockutils [req-7b6cca55-f40a-4999-ac56-45d2565d1df0 req-28bf4c1a-e073-43d9-9f68-4d372d48b816 service nova] Acquiring lock "refresh_cache-29281253-e489-48f5-b219-75ae984adb00" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 580.492665] env[61649]: DEBUG oslo_concurrency.lockutils [req-7b6cca55-f40a-4999-ac56-45d2565d1df0 req-28bf4c1a-e073-43d9-9f68-4d372d48b816 service nova] Acquired lock "refresh_cache-29281253-e489-48f5-b219-75ae984adb00" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 580.492814] env[61649]: DEBUG nova.network.neutron [req-7b6cca55-f40a-4999-ac56-45d2565d1df0 req-28bf4c1a-e073-43d9-9f68-4d372d48b816 service nova] [instance: 29281253-e489-48f5-b219-75ae984adb00] Refreshing network info cache for port 30aecfaa-83f3-4eef-93c6-436c0beaf134 {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 580.493314] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-7b6cca55-f40a-4999-ac56-45d2565d1df0 req-28bf4c1a-e073-43d9-9f68-4d372d48b816 service nova] Expecting reply to msg bd44c02df81f4ceba0b7831367fdb373 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 580.503227] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bd44c02df81f4ceba0b7831367fdb373 [ 580.740816] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Acquiring lock "00931111-13a1-447d-a401-943221badd59" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 580.741143] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Lock "00931111-13a1-447d-a401-943221badd59" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 580.741531] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Expecting reply to msg 91139602c9ae491a869a802f15f40cb2 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 580.752958] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 91139602c9ae491a869a802f15f40cb2 [ 580.753424] env[61649]: DEBUG nova.compute.manager [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] [instance: 00931111-13a1-447d-a401-943221badd59] Starting instance...
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 580.755063] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Expecting reply to msg 81ae8787936d43ebbcd8084be9d4082f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 580.791351] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 81ae8787936d43ebbcd8084be9d4082f [ 580.806527] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158103, 'name': CreateVM_Task, 'duration_secs': 0.289201} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 580.807791] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 580.808111] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 580.809422] env[61649]: INFO nova.compute.claims [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] [instance: 00931111-13a1-447d-a401-943221badd59] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 580.813065] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Expecting reply to msg bb5249ddeca346f8ad1ee5cb8ce51f0f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 580.813065] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Created VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 580.813065] env[61649]: DEBUG oslo_concurrency.lockutils [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 580.813065] env[61649]: DEBUG oslo_concurrency.lockutils [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 580.813065] env[61649]: DEBUG oslo_concurrency.lockutils [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 
tempest-ServerExternalEventsTest-216908951-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 580.813232] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8eb5d8bc-a1b3-48d3-af89-20e267fa969a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.818686] env[61649]: DEBUG oslo_vmware.api [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Waiting for the task: (returnval){ [ 580.818686] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52bd9a61-21df-291d-35ba-9c9f5aa885e1" [ 580.818686] env[61649]: _type = "Task" [ 580.818686] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 580.826505] env[61649]: DEBUG oslo_vmware.api [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52bd9a61-21df-291d-35ba-9c9f5aa885e1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.851510] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bb5249ddeca346f8ad1ee5cb8ce51f0f [ 580.853318] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Expecting reply to msg ba0b22c918b54498b1e7c2acb6183988 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 580.861489] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ba0b22c918b54498b1e7c2acb6183988 [ 581.022117] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8561a4c8-c9f5-433f-af65-1622421bfe30 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.030060] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0b24cfa-634b-4bcf-bed4-46e119f1ebc5 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.066404] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa29a2f6-eab4-4fbb-93b0-62dd0d28b915 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.074103] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-906965ea-34ef-4fd8-9f48-56a402c11350 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.087688] env[61649]: DEBUG nova.compute.provider_tree [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 581.088332] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Expecting reply to msg a9f886f70b8f4dba894cc7e7ef477a80 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 581.095569] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a9f886f70b8f4dba894cc7e7ef477a80 [ 581.096533] env[61649]: DEBUG nova.scheduler.client.report [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 581.098794] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Expecting reply to msg e3979137985540e5ae200a94ac930b79 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 581.125035] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e3979137985540e5ae200a94ac930b79 [ 581.125942] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.318s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 581.126417] env[61649]: DEBUG nova.compute.manager [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] [instance: 00931111-13a1-447d-a401-943221badd59] Start building networks asynchronously for instance. 
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 581.128200] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Expecting reply to msg 5099c0802d8846418bad59d4ff8460c9 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 581.166880] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5099c0802d8846418bad59d4ff8460c9 [ 581.168270] env[61649]: DEBUG nova.compute.utils [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Using /dev/sd instead of None {{(pid=61649) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 581.169153] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Expecting reply to msg 587a3e217ec444aeacb8608320923058 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 581.170170] env[61649]: DEBUG nova.compute.manager [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] [instance: 00931111-13a1-447d-a401-943221badd59] Allocating IP information in the background. {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 581.170170] env[61649]: DEBUG nova.network.neutron [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] [instance: 00931111-13a1-447d-a401-943221badd59] allocate_for_instance() {{(pid=61649) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 581.180672] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 587a3e217ec444aeacb8608320923058 [ 581.181342] env[61649]: DEBUG nova.compute.manager [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] [instance: 00931111-13a1-447d-a401-943221badd59] Start building block device mappings for instance. 
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 581.183193] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Expecting reply to msg e98ab3f188824be7a8c585459f2a77f1 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 581.218647] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e98ab3f188824be7a8c585459f2a77f1 [ 581.218647] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Expecting reply to msg 8ee05beb2b9545bab8a4a46ee04d47d2 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 581.301295] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8ee05beb2b9545bab8a4a46ee04d47d2 [ 581.302535] env[61649]: DEBUG nova.compute.manager [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] [instance: 00931111-13a1-447d-a401-943221badd59] Start spawning the instance on the hypervisor. {{(pid=61649) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 581.327993] env[61649]: DEBUG nova.virt.hardware [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-04-02T10:03:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-04-02T10:03:42Z,direct_url=,disk_format='vmdk',id=d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d9c1bd4c77004c3cb8e42232cad1896c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-04-02T10:03:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 581.328364] env[61649]: DEBUG nova.virt.hardware [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Flavor limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 581.328701] env[61649]: DEBUG nova.virt.hardware [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Image limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 581.329077] env[61649]: DEBUG nova.virt.hardware [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Flavor pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 581.329301] env[61649]: DEBUG nova.virt.hardware [None 
req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Image pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 581.329509] env[61649]: DEBUG nova.virt.hardware [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 581.329780] env[61649]: DEBUG nova.virt.hardware [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 581.330029] env[61649]: DEBUG nova.virt.hardware [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 581.330274] env[61649]: DEBUG nova.virt.hardware [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Got 1 possible topologies {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 581.330493] env[61649]: DEBUG nova.virt.hardware [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 581.330612] env[61649]: DEBUG nova.virt.hardware [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 581.331577] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f475e01-8db0-4c4e-914b-3b1ea7e73459 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.340223] env[61649]: DEBUG nova.compute.manager [req-02ecd990-897b-4d22-83fe-e3f4f9235f63 req-088c0a25-0f89-499d-9142-4dfef09f2274 service nova] [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] Received event network-changed-09c7ce06-9c39-4a22-a3bb-100dd69c2a32 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 581.340324] env[61649]: DEBUG nova.compute.manager [req-02ecd990-897b-4d22-83fe-e3f4f9235f63 req-088c0a25-0f89-499d-9142-4dfef09f2274 service nova] [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] Refreshing instance network info cache due to event network-changed-09c7ce06-9c39-4a22-a3bb-100dd69c2a32. 
{{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 581.342285] env[61649]: DEBUG oslo_concurrency.lockutils [req-02ecd990-897b-4d22-83fe-e3f4f9235f63 req-088c0a25-0f89-499d-9142-4dfef09f2274 service nova] Acquiring lock "refresh_cache-2c9b8ba9-193e-468f-bc4e-006ab413b374" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 581.342554] env[61649]: DEBUG oslo_concurrency.lockutils [req-02ecd990-897b-4d22-83fe-e3f4f9235f63 req-088c0a25-0f89-499d-9142-4dfef09f2274 service nova] Acquired lock "refresh_cache-2c9b8ba9-193e-468f-bc4e-006ab413b374" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 581.342776] env[61649]: DEBUG nova.network.neutron [req-02ecd990-897b-4d22-83fe-e3f4f9235f63 req-088c0a25-0f89-499d-9142-4dfef09f2274 service nova] [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] Refreshing network info cache for port 09c7ce06-9c39-4a22-a3bb-100dd69c2a32 {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 581.347714] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-02ecd990-897b-4d22-83fe-e3f4f9235f63 req-088c0a25-0f89-499d-9142-4dfef09f2274 service nova] Expecting reply to msg 52c8cd1e77e54d339aed603dd11551ce in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 581.351173] env[61649]: DEBUG oslo_concurrency.lockutils [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 581.351465] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Processing image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 581.354241] env[61649]: DEBUG oslo_concurrency.lockutils [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 581.362274] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8242c519-f85d-490d-9313-0eb57374e9af {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.362563] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 52c8cd1e77e54d339aed603dd11551ce [ 581.427853] env[61649]: DEBUG nova.policy [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6104986075644974aaa80bb3907bf2d1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b94780269444437fa1c7ff05a9723f42', 'project_domain_id': 'default', 'roles': ['reader', 
'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61649) authorize /opt/stack/nova/nova/policy.py:203}} [ 581.501671] env[61649]: DEBUG oslo_concurrency.lockutils [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Acquiring lock "99f9912a-edf0-40f5-a7ce-55767081705b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 581.501994] env[61649]: DEBUG oslo_concurrency.lockutils [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Lock "99f9912a-edf0-40f5-a7ce-55767081705b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 581.502479] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Expecting reply to msg 0ef1b6a3e0904eb18153b24c936c017c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 581.516032] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0ef1b6a3e0904eb18153b24c936c017c [ 581.516032] env[61649]: DEBUG nova.compute.manager [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Starting instance... 
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 581.517682] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Expecting reply to msg 307efd314fb84a7db6deb5c8d9d2479d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 581.570035] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 307efd314fb84a7db6deb5c8d9d2479d [ 581.594103] env[61649]: DEBUG oslo_concurrency.lockutils [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 581.594484] env[61649]: DEBUG oslo_concurrency.lockutils [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 581.595848] env[61649]: INFO nova.compute.claims [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 581.597608] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Expecting reply to msg 6d40e9ba24444cac9ee3b6a677bbd639 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 581.658857] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6d40e9ba24444cac9ee3b6a677bbd639 [ 581.660847] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Expecting reply to msg 6336b48ece0842efaa84d373f700d244 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 581.672708] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6336b48ece0842efaa84d373f700d244 [ 581.714638] env[61649]: DEBUG nova.network.neutron [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Successfully updated port: ffc0c8e2-6ea5-4ef1-be5a-c062c7652b45 {{(pid=61649) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 581.715141] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Expecting reply to msg 9251dc43891d4cecb6e8b772886d70cb in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 581.729466] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9251dc43891d4cecb6e8b772886d70cb [ 581.729978] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f 
tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Acquiring lock "refresh_cache-fd0ac9db-adc2-46f2-93ff-0b7e299534a7" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 581.730129] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Acquired lock "refresh_cache-fd0ac9db-adc2-46f2-93ff-0b7e299534a7" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 581.730299] env[61649]: DEBUG nova.network.neutron [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Building network info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 581.730788] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Expecting reply to msg d14179bd4c1440b2b59643f654996aa3 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 581.738661] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d14179bd4c1440b2b59643f654996aa3 [ 581.852550] env[61649]: DEBUG nova.network.neutron [req-7b6cca55-f40a-4999-ac56-45d2565d1df0 req-28bf4c1a-e073-43d9-9f68-4d372d48b816 service nova] [instance: 29281253-e489-48f5-b219-75ae984adb00] Updated VIF entry in instance network info cache for port 30aecfaa-83f3-4eef-93c6-436c0beaf134. 
{{(pid=61649) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 581.852904] env[61649]: DEBUG nova.network.neutron [req-7b6cca55-f40a-4999-ac56-45d2565d1df0 req-28bf4c1a-e073-43d9-9f68-4d372d48b816 service nova] [instance: 29281253-e489-48f5-b219-75ae984adb00] Updating instance_info_cache with network_info: [{"id": "30aecfaa-83f3-4eef-93c6-436c0beaf134", "address": "fa:16:3e:d5:17:5b", "network": {"id": "b96f19dc-5248-4d43-8f39-ea3131aaf6db", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.99", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d9c1bd4c77004c3cb8e42232cad1896c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a8c8175-1197-4f12-baac-ef6aba95f585", "external-id": "nsx-vlan-transportzone-832", "segmentation_id": 832, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30aecfaa-83", "ovs_interfaceid": "30aecfaa-83f3-4eef-93c6-436c0beaf134", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 581.853378] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-7b6cca55-f40a-4999-ac56-45d2565d1df0 req-28bf4c1a-e073-43d9-9f68-4d372d48b816 service nova] Expecting reply to msg 90714c74e0454de28f3482beeb0931e3 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 581.865737] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 90714c74e0454de28f3482beeb0931e3 [ 581.866060] env[61649]: DEBUG oslo_concurrency.lockutils [req-7b6cca55-f40a-4999-ac56-45d2565d1df0 req-28bf4c1a-e073-43d9-9f68-4d372d48b816 service nova] Releasing lock "refresh_cache-29281253-e489-48f5-b219-75ae984adb00" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 581.866297] env[61649]: DEBUG nova.compute.manager [req-7b6cca55-f40a-4999-ac56-45d2565d1df0 req-28bf4c1a-e073-43d9-9f68-4d372d48b816 service nova] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Received event network-changed-f6f03f6f-624c-4154-847a-ea4f6674de15 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 581.866467] env[61649]: DEBUG nova.compute.manager [req-7b6cca55-f40a-4999-ac56-45d2565d1df0 req-28bf4c1a-e073-43d9-9f68-4d372d48b816 service nova] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Refreshing instance network info cache due to event network-changed-f6f03f6f-624c-4154-847a-ea4f6674de15. 
{{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 581.866672] env[61649]: DEBUG oslo_concurrency.lockutils [req-7b6cca55-f40a-4999-ac56-45d2565d1df0 req-28bf4c1a-e073-43d9-9f68-4d372d48b816 service nova] Acquiring lock "refresh_cache-aacbebf5-bd31-465b-b574-6c4a98b27f30" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 581.866825] env[61649]: DEBUG oslo_concurrency.lockutils [req-7b6cca55-f40a-4999-ac56-45d2565d1df0 req-28bf4c1a-e073-43d9-9f68-4d372d48b816 service nova] Acquired lock "refresh_cache-aacbebf5-bd31-465b-b574-6c4a98b27f30" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 581.866995] env[61649]: DEBUG nova.network.neutron [req-7b6cca55-f40a-4999-ac56-45d2565d1df0 req-28bf4c1a-e073-43d9-9f68-4d372d48b816 service nova] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Refreshing network info cache for port f6f03f6f-624c-4154-847a-ea4f6674de15 {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 581.867485] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-7b6cca55-f40a-4999-ac56-45d2565d1df0 req-28bf4c1a-e073-43d9-9f68-4d372d48b816 service nova] Expecting reply to msg 3743a967cd2e48c0bb85d27104119e29 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 581.871314] env[61649]: DEBUG nova.network.neutron [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Instance cache missing network info. {{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 581.875025] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5045b21d-e2f1-4769-b0cf-759ad8857461 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.877856] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3743a967cd2e48c0bb85d27104119e29 [ 581.886019] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faad73f4-20a1-4cb2-a49c-e7948cea6f4c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.919107] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef35ebfb-84d6-4bae-9a61-c39759dcda3c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.927052] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ddcf8aa-67c3-4764-b52d-914f5c7666bd {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.943094] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 581.943634] env[61649]: DEBUG nova.compute.provider_tree [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Inventory has not changed in ProviderTree for provider: 
dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 581.944132] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Expecting reply to msg 12aa4cd1f95346cea4a2017c2a2a9e03 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 581.945171] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 581.945381] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Starting heal instance info cache {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 581.945504] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Rebuilding the list of instances to heal {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 581.946023] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 8356309194f14c26b44790aa2909a831 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 581.954600] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 12aa4cd1f95346cea4a2017c2a2a9e03 [ 581.956502] env[61649]: DEBUG nova.scheduler.client.report [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 581.957738] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Expecting reply to msg 832b6ce245dc449b9a0f11c3ee0572d5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 581.968786] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8356309194f14c26b44790aa2909a831 [ 581.971521] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 581.971669] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] Skipping network cache update for instance because it is Building. 
{{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 581.971798] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 29281253-e489-48f5-b219-75ae984adb00] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 581.972179] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 581.972373] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 581.972501] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 581.972622] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 581.972739] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 581.972853] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 00931111-13a1-447d-a401-943221badd59] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 581.972968] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 581.973094] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Didn't find any instances for network info cache update. 
{{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 581.973631] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 832b6ce245dc449b9a0f11c3ee0572d5 [ 581.973979] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 581.974589] env[61649]: DEBUG oslo_concurrency.lockutils [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.380s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 581.975013] env[61649]: DEBUG nova.compute.manager [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Start building networks asynchronously for instance. {{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 581.976626] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Expecting reply to msg 93ce30dd0a4040648c2aa300d854e4a9 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 581.977542] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 581.978135] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 581.978345] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 581.978532] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 581.978712] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 581.978875] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61649) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 581.979036] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 581.979417] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 8a95bc50b70f40cdaf1f0bc0e6722539 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 581.989295] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8a95bc50b70f40cdaf1f0bc0e6722539 [ 581.990185] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 581.990522] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 581.990695] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 581.990850] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61649) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 581.992428] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a8fc1bc-e1f3-4e24-bf1e-98599a28cc89 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.003977] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55b96940-3bde-40f5-bf33-d6afcb5e6931 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.009106] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 93ce30dd0a4040648c2aa300d854e4a9 [ 582.010355] env[61649]: DEBUG nova.compute.utils [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Using /dev/sd instead of None {{(pid=61649) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 582.011949] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Expecting reply to msg 4628674f11a848d8bbee757ad8c78669 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 582.020849] env[61649]: DEBUG nova.compute.manager [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 
tempest-ServersAdminTestJSON-1522131393-project-member] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Allocating IP information in the background. {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 582.021081] env[61649]: DEBUG nova.network.neutron [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] allocate_for_instance() {{(pid=61649) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 582.023571] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4628674f11a848d8bbee757ad8c78669 [ 582.024407] env[61649]: DEBUG nova.compute.manager [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Start building block device mappings for instance. {{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 582.025973] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Expecting reply to msg fc2435647f6044279b08f76263b514ab in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 582.027686] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90730e84-24ac-44b8-a5fb-3cd9fee94070 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.035883] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbdc1553-c10f-41d5-b2bc-3633dda01861 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.069213] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181804MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61649) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 582.069381] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 582.069818] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 582.071038] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg ea431b0876664d1b907256911b77238a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 582.079622] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fc2435647f6044279b08f76263b514ab [ 582.082645] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 
tempest-ServersAdminTestJSON-1522131393-project-member] Expecting reply to msg ce92abfcc816425e9ef16deebe12a0a8 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 582.108569] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ea431b0876664d1b907256911b77238a [ 582.115171] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 7e21ce57d4e24d4292810c0aa3f6dbbb in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 582.119958] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ce92abfcc816425e9ef16deebe12a0a8 [ 582.120703] env[61649]: DEBUG nova.compute.manager [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Start spawning the instance on the hypervisor. {{(pid=61649) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 582.137237] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7e21ce57d4e24d4292810c0aa3f6dbbb [ 582.155536] env[61649]: DEBUG nova.virt.hardware [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-04-02T10:03:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-04-02T10:03:42Z,direct_url=,disk_format='vmdk',id=d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d9c1bd4c77004c3cb8e42232cad1896c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-04-02T10:03:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 582.155766] env[61649]: DEBUG nova.virt.hardware [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Flavor limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 582.155918] env[61649]: DEBUG nova.virt.hardware [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Image limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 582.156114] env[61649]: DEBUG nova.virt.hardware [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Flavor pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 582.156261] env[61649]: DEBUG nova.virt.hardware [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Image pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 582.156403] env[61649]: DEBUG nova.virt.hardware [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 
tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 582.156602] env[61649]: DEBUG nova.virt.hardware [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 582.156755] env[61649]: DEBUG nova.virt.hardware [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 582.156915] env[61649]: DEBUG nova.virt.hardware [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Got 1 possible topologies {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 582.157099] env[61649]: DEBUG nova.virt.hardware [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 582.157331] env[61649]: DEBUG nova.virt.hardware [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 582.158402] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3959e31c-8a4c-4b1a-af0c-ac6e494fec7d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.162360] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 683996e1-4e16-4add-8fa6-3c2843ebbf21 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 582.162505] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance b6e68fe1-4ec8-4f0f-bc6b-168038b1998e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 582.162640] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 29281253-e489-48f5-b219-75ae984adb00 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 582.162749] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 2c9b8ba9-193e-468f-bc4e-006ab413b374 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 582.162920] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance aacbebf5-bd31-465b-b574-6c4a98b27f30 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 582.162979] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 7d93caea-4740-4bcd-9f06-9397d37a07b2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 582.163362] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance eb0c04e3-1234-445c-bfa6-e031dd0b89d3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 582.163362] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance fd0ac9db-adc2-46f2-93ff-0b7e299534a7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 582.163362] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 00931111-13a1-447d-a401-943221badd59 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 582.163495] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 99f9912a-edf0-40f5-a7ce-55767081705b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 582.163592] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 582.163728] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 582.171261] env[61649]: DEBUG nova.policy [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6f9c51cfef1646d4986c4cea34966be4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5a70e175710d40f1b889d65c5eaca043', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61649) authorize /opt/stack/nova/nova/policy.py:203}} [ 582.173677] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24946f72-112a-4d1f-bc30-dbdce6ff383d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.326136] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f0badbd-e67f-4664-8e20-7d163b0c431c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.333913] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a790c417-51e3-4377-a387-ca6f4ea65243 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.367979] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d76746a7-9858-4498-a97a-b2b39f48e52d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.377087] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96ec94b9-994c-41be-9e25-ca908ca8aed0 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.391496] env[61649]: DEBUG nova.compute.provider_tree [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 582.391983] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 306a85b799d244008737d6d3f785039f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 582.400768] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 306a85b799d244008737d6d3f785039f [ 582.400768] env[61649]: 
DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 582.403709] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg fd16203965394c698e03801b5c5db3eb in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 582.416292] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fd16203965394c698e03801b5c5db3eb [ 582.416903] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61649) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 582.417222] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.347s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 582.656483] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Acquiring lock "7f9f2074-6822-4d9d-9791-4bebc7e55862" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 582.656712] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Lock "7f9f2074-6822-4d9d-9791-4bebc7e55862" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 582.776983] env[61649]: DEBUG nova.network.neutron [req-02ecd990-897b-4d22-83fe-e3f4f9235f63 req-088c0a25-0f89-499d-9142-4dfef09f2274 service nova] [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] Updated VIF entry in instance network info cache for port 09c7ce06-9c39-4a22-a3bb-100dd69c2a32. 
{{(pid=61649) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 582.777620] env[61649]: DEBUG nova.network.neutron [req-02ecd990-897b-4d22-83fe-e3f4f9235f63 req-088c0a25-0f89-499d-9142-4dfef09f2274 service nova] [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] Updating instance_info_cache with network_info: [{"id": "09c7ce06-9c39-4a22-a3bb-100dd69c2a32", "address": "fa:16:3e:29:c8:fa", "network": {"id": "84fe0cc8-55f3-4b3f-99d7-f93bd7a68e42", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-703690495-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35934b0d6fb84745955d3aad4935dd83", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8bbebaf-0fb5-42ae-8d4f-ecd4f46d0244", "external-id": "nsx-vlan-transportzone-296", "segmentation_id": 296, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09c7ce06-9c", "ovs_interfaceid": "09c7ce06-9c39-4a22-a3bb-100dd69c2a32", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 582.778189] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-02ecd990-897b-4d22-83fe-e3f4f9235f63 req-088c0a25-0f89-499d-9142-4dfef09f2274 service nova] Expecting reply to msg 11b826ec0f294349986d04b9ad815c6f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 582.788172] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 11b826ec0f294349986d04b9ad815c6f [ 582.788818] env[61649]: DEBUG oslo_concurrency.lockutils [req-02ecd990-897b-4d22-83fe-e3f4f9235f63 req-088c0a25-0f89-499d-9142-4dfef09f2274 service nova] Releasing lock "refresh_cache-2c9b8ba9-193e-468f-bc4e-006ab413b374" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 583.066056] env[61649]: DEBUG nova.network.neutron [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] [instance: 00931111-13a1-447d-a401-943221badd59] Successfully created port: 06fece4f-69a0-4370-8c8f-4f7895e7f5f8 {{(pid=61649) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 583.245778] env[61649]: DEBUG nova.network.neutron [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Updating instance_info_cache with network_info: [{"id": "ffc0c8e2-6ea5-4ef1-be5a-c062c7652b45", "address": "fa:16:3e:d0:3b:1f", "network": {"id": "796a0a57-5bc9-4906-b039-3342b997f972", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-191920708-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": 
{"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6758fd3414654235a7cc71acfdefa72d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "171aeae0-6a27-44fc-bc3d-a2d5581fc702", "external-id": "nsx-vlan-transportzone-410", "segmentation_id": 410, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapffc0c8e2-6e", "ovs_interfaceid": "ffc0c8e2-6ea5-4ef1-be5a-c062c7652b45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 583.246256] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Expecting reply to msg af0c73f7de6e41e4a371695600552eea in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 583.256737] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg af0c73f7de6e41e4a371695600552eea [ 583.257349] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Releasing lock "refresh_cache-fd0ac9db-adc2-46f2-93ff-0b7e299534a7" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 583.257642] env[61649]: DEBUG nova.compute.manager [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Instance network_info: |[{"id": "ffc0c8e2-6ea5-4ef1-be5a-c062c7652b45", "address": "fa:16:3e:d0:3b:1f", "network": {"id": "796a0a57-5bc9-4906-b039-3342b997f972", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-191920708-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6758fd3414654235a7cc71acfdefa72d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "171aeae0-6a27-44fc-bc3d-a2d5581fc702", "external-id": "nsx-vlan-transportzone-410", "segmentation_id": 410, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapffc0c8e2-6e", "ovs_interfaceid": "ffc0c8e2-6ea5-4ef1-be5a-c062c7652b45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 583.258017] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d0:3b:1f', 'network_ref': {'type': 'OpaqueNetwork', 
'network-id': '171aeae0-6a27-44fc-bc3d-a2d5581fc702', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ffc0c8e2-6ea5-4ef1-be5a-c062c7652b45', 'vif_model': 'vmxnet3'}] {{(pid=61649) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 583.265566] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Creating folder: Project (6758fd3414654235a7cc71acfdefa72d). Parent ref: group-v51588. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 583.266109] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4659e6f5-e48f-4ee5-bb97-5d194a604043 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.276513] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Created folder: Project (6758fd3414654235a7cc71acfdefa72d) in parent group-v51588. [ 583.276701] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Creating folder: Instances. Parent ref: group-v51610. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 583.276943] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7daa6cf0-8ff0-445c-9efd-8fd271ab44bc {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.285674] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Created folder: Instances in parent group-v51610. [ 583.285967] env[61649]: DEBUG oslo.service.loopingcall [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 583.286159] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Creating VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 583.286351] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-31f1bb24-dd10-4995-9ec0-7e564e3c4617 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.305782] env[61649]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 583.305782] env[61649]: value = "task-158106" [ 583.305782] env[61649]: _type = "Task" [ 583.305782] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 583.314013] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158106, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.314840] env[61649]: DEBUG nova.network.neutron [req-7b6cca55-f40a-4999-ac56-45d2565d1df0 req-28bf4c1a-e073-43d9-9f68-4d372d48b816 service nova] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Updated VIF entry in instance network info cache for port f6f03f6f-624c-4154-847a-ea4f6674de15. {{(pid=61649) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 583.315133] env[61649]: DEBUG nova.network.neutron [req-7b6cca55-f40a-4999-ac56-45d2565d1df0 req-28bf4c1a-e073-43d9-9f68-4d372d48b816 service nova] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Updating instance_info_cache with network_info: [{"id": "f6f03f6f-624c-4154-847a-ea4f6674de15", "address": "fa:16:3e:83:12:b8", "network": {"id": "cf322de5-53c5-4644-ade1-8e6c1798faa8", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1492084188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33ec6b4a6e4d4d828bd5faa92126c92e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089a7624-43ba-4fce-bfc0-63e4bb7f9aeb", "external-id": "nsx-vlan-transportzone-218", "segmentation_id": 218, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6f03f6f-62", "ovs_interfaceid": "f6f03f6f-624c-4154-847a-ea4f6674de15", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 583.315619] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-7b6cca55-f40a-4999-ac56-45d2565d1df0 req-28bf4c1a-e073-43d9-9f68-4d372d48b816 service nova] Expecting reply to msg c2d189089c014b7a8c5f54fb4690bbe0 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 583.324151] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c2d189089c014b7a8c5f54fb4690bbe0 [ 583.324997] env[61649]: DEBUG oslo_concurrency.lockutils [req-7b6cca55-f40a-4999-ac56-45d2565d1df0 req-28bf4c1a-e073-43d9-9f68-4d372d48b816 service nova] Releasing lock "refresh_cache-aacbebf5-bd31-465b-b574-6c4a98b27f30" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 583.815916] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158106, 'name': CreateVM_Task, 'duration_secs': 0.315463} completed successfully. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 583.816055] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Created VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 583.816738] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 583.816813] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 583.817143] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 583.817403] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-096cdb32-07c6-4c08-aaa7-629cf75c0a55 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.824431] env[61649]: DEBUG oslo_vmware.api [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Waiting for the task: (returnval){ [ 583.824431] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52c04f89-c8b9-ac89-562f-691848f1aa44" [ 583.824431] env[61649]: _type = "Task" [ 583.824431] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 583.839637] env[61649]: DEBUG oslo_vmware.api [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52c04f89-c8b9-ac89-562f-691848f1aa44, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.857678] env[61649]: DEBUG nova.network.neutron [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Successfully created port: ba529c28-ed7b-4228-96eb-a795fa6b4f90 {{(pid=61649) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 584.337564] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 584.337797] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Processing image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 584.338006] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 584.755829] env[61649]: DEBUG nova.network.neutron [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] [instance: 00931111-13a1-447d-a401-943221badd59] Successfully updated port: 06fece4f-69a0-4370-8c8f-4f7895e7f5f8 {{(pid=61649) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 584.756393] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Expecting reply to msg 6008594a785f4cb1b7c5c8d1752a2ade in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 584.765339] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6008594a785f4cb1b7c5c8d1752a2ade [ 584.766081] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Acquiring lock "refresh_cache-00931111-13a1-447d-a401-943221badd59" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 584.766209] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Acquired lock "refresh_cache-00931111-13a1-447d-a401-943221badd59" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 584.766356] env[61649]: DEBUG nova.network.neutron [None req-d10fa353-d5f2-440c-8526-b0f317098d35 
tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] [instance: 00931111-13a1-447d-a401-943221badd59] Building network info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 584.766757] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Expecting reply to msg 8307de949c4f430e824390d8180e4b02 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 584.773504] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8307de949c4f430e824390d8180e4b02 [ 584.987590] env[61649]: DEBUG nova.network.neutron [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] [instance: 00931111-13a1-447d-a401-943221badd59] Instance cache missing network info. {{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 585.294230] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Acquiring lock "bf8c692f-6510-4548-aedd-0e1792512e20" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 585.294601] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Lock "bf8c692f-6510-4548-aedd-0e1792512e20" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 585.511713] env[61649]: DEBUG nova.network.neutron [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] [instance: 00931111-13a1-447d-a401-943221badd59] Updating instance_info_cache with network_info: [{"id": "06fece4f-69a0-4370-8c8f-4f7895e7f5f8", "address": "fa:16:3e:5a:7e:01", "network": {"id": "d523315a-b523-4024-9606-3990772ae627", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-616506589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b94780269444437fa1c7ff05a9723f42", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73915082-a1b0-460b-b24d-97588fc9cb29", "external-id": "nsx-vlan-transportzone-744", "segmentation_id": 744, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06fece4f-69", "ovs_interfaceid": "06fece4f-69a0-4370-8c8f-4f7895e7f5f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": 
false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 585.512155] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Expecting reply to msg 41f016ad679b4047bf180dc650a5ec4b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 585.522180] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 41f016ad679b4047bf180dc650a5ec4b [ 585.522875] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Releasing lock "refresh_cache-00931111-13a1-447d-a401-943221badd59" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 585.523079] env[61649]: DEBUG nova.compute.manager [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] [instance: 00931111-13a1-447d-a401-943221badd59] Instance network_info: |[{"id": "06fece4f-69a0-4370-8c8f-4f7895e7f5f8", "address": "fa:16:3e:5a:7e:01", "network": {"id": "d523315a-b523-4024-9606-3990772ae627", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-616506589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b94780269444437fa1c7ff05a9723f42", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73915082-a1b0-460b-b24d-97588fc9cb29", "external-id": "nsx-vlan-transportzone-744", "segmentation_id": 744, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06fece4f-69", "ovs_interfaceid": "06fece4f-69a0-4370-8c8f-4f7895e7f5f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 585.523466] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] [instance: 00931111-13a1-447d-a401-943221badd59] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5a:7e:01', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '73915082-a1b0-460b-b24d-97588fc9cb29', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '06fece4f-69a0-4370-8c8f-4f7895e7f5f8', 'vif_model': 'vmxnet3'}] {{(pid=61649) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 585.531345] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Creating folder: Project (b94780269444437fa1c7ff05a9723f42). 
Parent ref: group-v51588. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 585.532555] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6e9fafe8-ed98-40be-9297-183f787894fd {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.544230] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Created folder: Project (b94780269444437fa1c7ff05a9723f42) in parent group-v51588. [ 585.544886] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Creating folder: Instances. Parent ref: group-v51613. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 585.544886] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-485c4f02-3a8b-4d06-8092-754882266614 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.554549] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Created folder: Instances in parent group-v51613. [ 585.554776] env[61649]: DEBUG oslo.service.loopingcall [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 585.554985] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 00931111-13a1-447d-a401-943221badd59] Creating VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 585.555172] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-87de814d-dbc7-4937-8221-35f0c719c1ce {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.574352] env[61649]: DEBUG nova.compute.manager [req-95da7399-39ac-4600-bcf2-fe87d046d58f req-1ad0428a-3350-424d-92f0-d9c278cc5d53 service nova] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Received event network-vif-plugged-09bcc069-4b84-4f9f-8360-b7906d608415 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 585.574535] env[61649]: DEBUG oslo_concurrency.lockutils [req-95da7399-39ac-4600-bcf2-fe87d046d58f req-1ad0428a-3350-424d-92f0-d9c278cc5d53 service nova] Acquiring lock "eb0c04e3-1234-445c-bfa6-e031dd0b89d3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 585.574568] env[61649]: DEBUG oslo_concurrency.lockutils [req-95da7399-39ac-4600-bcf2-fe87d046d58f req-1ad0428a-3350-424d-92f0-d9c278cc5d53 service nova] Lock "eb0c04e3-1234-445c-bfa6-e031dd0b89d3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 585.574974] env[61649]: DEBUG oslo_concurrency.lockutils [req-95da7399-39ac-4600-bcf2-fe87d046d58f req-1ad0428a-3350-424d-92f0-d9c278cc5d53 service nova] Lock "eb0c04e3-1234-445c-bfa6-e031dd0b89d3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 585.574974] env[61649]: DEBUG nova.compute.manager [req-95da7399-39ac-4600-bcf2-fe87d046d58f req-1ad0428a-3350-424d-92f0-d9c278cc5d53 service nova] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] No waiting events found dispatching network-vif-plugged-09bcc069-4b84-4f9f-8360-b7906d608415 {{(pid=61649) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 585.575088] env[61649]: WARNING nova.compute.manager [req-95da7399-39ac-4600-bcf2-fe87d046d58f req-1ad0428a-3350-424d-92f0-d9c278cc5d53 service nova] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Received unexpected event network-vif-plugged-09bcc069-4b84-4f9f-8360-b7906d608415 for instance with vm_state building and task_state spawning. [ 585.575195] env[61649]: DEBUG nova.compute.manager [req-95da7399-39ac-4600-bcf2-fe87d046d58f req-1ad0428a-3350-424d-92f0-d9c278cc5d53 service nova] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Received event network-changed-09bcc069-4b84-4f9f-8360-b7906d608415 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 585.575334] env[61649]: DEBUG nova.compute.manager [req-95da7399-39ac-4600-bcf2-fe87d046d58f req-1ad0428a-3350-424d-92f0-d9c278cc5d53 service nova] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Refreshing instance network info cache due to event network-changed-09bcc069-4b84-4f9f-8360-b7906d608415. 
{{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 585.575512] env[61649]: DEBUG oslo_concurrency.lockutils [req-95da7399-39ac-4600-bcf2-fe87d046d58f req-1ad0428a-3350-424d-92f0-d9c278cc5d53 service nova] Acquiring lock "refresh_cache-eb0c04e3-1234-445c-bfa6-e031dd0b89d3" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 585.575718] env[61649]: DEBUG oslo_concurrency.lockutils [req-95da7399-39ac-4600-bcf2-fe87d046d58f req-1ad0428a-3350-424d-92f0-d9c278cc5d53 service nova] Acquired lock "refresh_cache-eb0c04e3-1234-445c-bfa6-e031dd0b89d3" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 585.575786] env[61649]: DEBUG nova.network.neutron [req-95da7399-39ac-4600-bcf2-fe87d046d58f req-1ad0428a-3350-424d-92f0-d9c278cc5d53 service nova] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Refreshing network info cache for port 09bcc069-4b84-4f9f-8360-b7906d608415 {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 585.576351] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-95da7399-39ac-4600-bcf2-fe87d046d58f req-1ad0428a-3350-424d-92f0-d9c278cc5d53 service nova] Expecting reply to msg 83b0c73f38a2483780aa74cecb94de11 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 585.582506] env[61649]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 585.582506] env[61649]: value = "task-158109" [ 585.582506] env[61649]: _type = "Task" [ 585.582506] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 585.586938] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 83b0c73f38a2483780aa74cecb94de11 [ 585.593217] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158109, 'name': CreateVM_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.093025] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158109, 'name': CreateVM_Task, 'duration_secs': 0.30991} completed successfully. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 586.093402] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 00931111-13a1-447d-a401-943221badd59] Created VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 586.094196] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 586.094626] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 586.095080] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 586.095485] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-73341daf-acaa-4687-99f6-65f727bc3b13 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.100974] env[61649]: DEBUG oslo_vmware.api [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Waiting for the task: (returnval){ [ 586.100974] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]523663fc-2b02-7234-e4c1-ee6e81d9ee00" [ 586.100974] env[61649]: _type = "Task" [ 586.100974] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 586.110314] env[61649]: DEBUG oslo_vmware.api [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]523663fc-2b02-7234-e4c1-ee6e81d9ee00, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.358647] env[61649]: DEBUG nova.compute.manager [req-64a3716c-9173-4df2-b920-8065a69f349e req-def4f386-f877-420b-9429-99d49f9ec380 service nova] [instance: 00931111-13a1-447d-a401-943221badd59] Received event network-vif-plugged-06fece4f-69a0-4370-8c8f-4f7895e7f5f8 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 586.359055] env[61649]: DEBUG oslo_concurrency.lockutils [req-64a3716c-9173-4df2-b920-8065a69f349e req-def4f386-f877-420b-9429-99d49f9ec380 service nova] Acquiring lock "00931111-13a1-447d-a401-943221badd59-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 586.359655] env[61649]: DEBUG oslo_concurrency.lockutils [req-64a3716c-9173-4df2-b920-8065a69f349e req-def4f386-f877-420b-9429-99d49f9ec380 service nova] Lock "00931111-13a1-447d-a401-943221badd59-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 586.359976] env[61649]: DEBUG oslo_concurrency.lockutils [req-64a3716c-9173-4df2-b920-8065a69f349e req-def4f386-f877-420b-9429-99d49f9ec380 service nova] Lock "00931111-13a1-447d-a401-943221badd59-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 586.360363] env[61649]: DEBUG nova.compute.manager [req-64a3716c-9173-4df2-b920-8065a69f349e req-def4f386-f877-420b-9429-99d49f9ec380 service nova] [instance: 00931111-13a1-447d-a401-943221badd59] No waiting events found dispatching network-vif-plugged-06fece4f-69a0-4370-8c8f-4f7895e7f5f8 {{(pid=61649) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 586.360641] env[61649]: WARNING nova.compute.manager [req-64a3716c-9173-4df2-b920-8065a69f349e req-def4f386-f877-420b-9429-99d49f9ec380 service nova] [instance: 00931111-13a1-447d-a401-943221badd59] Received unexpected event network-vif-plugged-06fece4f-69a0-4370-8c8f-4f7895e7f5f8 for instance with vm_state building and task_state spawning. [ 586.374031] env[61649]: DEBUG nova.network.neutron [req-95da7399-39ac-4600-bcf2-fe87d046d58f req-1ad0428a-3350-424d-92f0-d9c278cc5d53 service nova] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Updated VIF entry in instance network info cache for port 09bcc069-4b84-4f9f-8360-b7906d608415. 
{{(pid=61649) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 586.374494] env[61649]: DEBUG nova.network.neutron [req-95da7399-39ac-4600-bcf2-fe87d046d58f req-1ad0428a-3350-424d-92f0-d9c278cc5d53 service nova] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Updating instance_info_cache with network_info: [{"id": "09bcc069-4b84-4f9f-8360-b7906d608415", "address": "fa:16:3e:e9:80:d0", "network": {"id": "b96f19dc-5248-4d43-8f39-ea3131aaf6db", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.60", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d9c1bd4c77004c3cb8e42232cad1896c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a8c8175-1197-4f12-baac-ef6aba95f585", "external-id": "nsx-vlan-transportzone-832", "segmentation_id": 832, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09bcc069-4b", "ovs_interfaceid": "09bcc069-4b84-4f9f-8360-b7906d608415", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 586.375246] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-95da7399-39ac-4600-bcf2-fe87d046d58f req-1ad0428a-3350-424d-92f0-d9c278cc5d53 service nova] Expecting reply to msg 5a3241190a9a431da716aaec76403bf2 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 586.384441] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5a3241190a9a431da716aaec76403bf2 [ 586.385093] env[61649]: DEBUG oslo_concurrency.lockutils [req-95da7399-39ac-4600-bcf2-fe87d046d58f req-1ad0428a-3350-424d-92f0-d9c278cc5d53 service nova] Releasing lock "refresh_cache-eb0c04e3-1234-445c-bfa6-e031dd0b89d3" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 586.385448] env[61649]: DEBUG nova.compute.manager [req-95da7399-39ac-4600-bcf2-fe87d046d58f req-1ad0428a-3350-424d-92f0-d9c278cc5d53 service nova] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Received event network-vif-plugged-ffc0c8e2-6ea5-4ef1-be5a-c062c7652b45 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 586.385947] env[61649]: DEBUG oslo_concurrency.lockutils [req-95da7399-39ac-4600-bcf2-fe87d046d58f req-1ad0428a-3350-424d-92f0-d9c278cc5d53 service nova] Acquiring lock "fd0ac9db-adc2-46f2-93ff-0b7e299534a7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 586.386276] env[61649]: DEBUG oslo_concurrency.lockutils [req-95da7399-39ac-4600-bcf2-fe87d046d58f req-1ad0428a-3350-424d-92f0-d9c278cc5d53 service nova] Lock "fd0ac9db-adc2-46f2-93ff-0b7e299534a7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 586.386771] env[61649]: DEBUG oslo_concurrency.lockutils 
[req-95da7399-39ac-4600-bcf2-fe87d046d58f req-1ad0428a-3350-424d-92f0-d9c278cc5d53 service nova] Lock "fd0ac9db-adc2-46f2-93ff-0b7e299534a7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 586.387075] env[61649]: DEBUG nova.compute.manager [req-95da7399-39ac-4600-bcf2-fe87d046d58f req-1ad0428a-3350-424d-92f0-d9c278cc5d53 service nova] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] No waiting events found dispatching network-vif-plugged-ffc0c8e2-6ea5-4ef1-be5a-c062c7652b45 {{(pid=61649) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 586.387448] env[61649]: WARNING nova.compute.manager [req-95da7399-39ac-4600-bcf2-fe87d046d58f req-1ad0428a-3350-424d-92f0-d9c278cc5d53 service nova] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Received unexpected event network-vif-plugged-ffc0c8e2-6ea5-4ef1-be5a-c062c7652b45 for instance with vm_state building and task_state spawning. [ 586.387734] env[61649]: DEBUG nova.compute.manager [req-95da7399-39ac-4600-bcf2-fe87d046d58f req-1ad0428a-3350-424d-92f0-d9c278cc5d53 service nova] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Received event network-changed-ffc0c8e2-6ea5-4ef1-be5a-c062c7652b45 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 586.388123] env[61649]: DEBUG nova.compute.manager [req-95da7399-39ac-4600-bcf2-fe87d046d58f req-1ad0428a-3350-424d-92f0-d9c278cc5d53 service nova] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Refreshing instance network info cache due to event network-changed-ffc0c8e2-6ea5-4ef1-be5a-c062c7652b45. {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 586.388639] env[61649]: DEBUG oslo_concurrency.lockutils [req-95da7399-39ac-4600-bcf2-fe87d046d58f req-1ad0428a-3350-424d-92f0-d9c278cc5d53 service nova] Acquiring lock "refresh_cache-fd0ac9db-adc2-46f2-93ff-0b7e299534a7" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 586.388901] env[61649]: DEBUG oslo_concurrency.lockutils [req-95da7399-39ac-4600-bcf2-fe87d046d58f req-1ad0428a-3350-424d-92f0-d9c278cc5d53 service nova] Acquired lock "refresh_cache-fd0ac9db-adc2-46f2-93ff-0b7e299534a7" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 586.389377] env[61649]: DEBUG nova.network.neutron [req-95da7399-39ac-4600-bcf2-fe87d046d58f req-1ad0428a-3350-424d-92f0-d9c278cc5d53 service nova] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Refreshing network info cache for port ffc0c8e2-6ea5-4ef1-be5a-c062c7652b45 {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 586.390049] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-95da7399-39ac-4600-bcf2-fe87d046d58f req-1ad0428a-3350-424d-92f0-d9c278cc5d53 service nova] Expecting reply to msg b93c22a55ac24ee584707eb407ecbf94 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 586.397960] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b93c22a55ac24ee584707eb407ecbf94 [ 586.611127] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" 
{{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 586.611431] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] [instance: 00931111-13a1-447d-a401-943221badd59] Processing image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 586.611643] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 586.781372] env[61649]: DEBUG nova.network.neutron [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Successfully updated port: ba529c28-ed7b-4228-96eb-a795fa6b4f90 {{(pid=61649) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 586.781851] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Expecting reply to msg 72a0866b9d5c41ed95e618d75bc64d9e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 586.788636] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 72a0866b9d5c41ed95e618d75bc64d9e [ 586.789467] env[61649]: DEBUG oslo_concurrency.lockutils [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Acquiring lock "refresh_cache-99f9912a-edf0-40f5-a7ce-55767081705b" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 586.789603] env[61649]: DEBUG oslo_concurrency.lockutils [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Acquired lock "refresh_cache-99f9912a-edf0-40f5-a7ce-55767081705b" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 586.789750] env[61649]: DEBUG nova.network.neutron [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Building network info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 586.790225] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Expecting reply to msg 7809ff56cff94a12a80bbac4ce79d969 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 586.796805] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7809ff56cff94a12a80bbac4ce79d969 [ 587.025953] env[61649]: DEBUG nova.network.neutron [req-95da7399-39ac-4600-bcf2-fe87d046d58f req-1ad0428a-3350-424d-92f0-d9c278cc5d53 service nova] [instance: 
fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Updated VIF entry in instance network info cache for port ffc0c8e2-6ea5-4ef1-be5a-c062c7652b45. {{(pid=61649) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 587.025953] env[61649]: DEBUG nova.network.neutron [req-95da7399-39ac-4600-bcf2-fe87d046d58f req-1ad0428a-3350-424d-92f0-d9c278cc5d53 service nova] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Updating instance_info_cache with network_info: [{"id": "ffc0c8e2-6ea5-4ef1-be5a-c062c7652b45", "address": "fa:16:3e:d0:3b:1f", "network": {"id": "796a0a57-5bc9-4906-b039-3342b997f972", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-191920708-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6758fd3414654235a7cc71acfdefa72d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "171aeae0-6a27-44fc-bc3d-a2d5581fc702", "external-id": "nsx-vlan-transportzone-410", "segmentation_id": 410, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapffc0c8e2-6e", "ovs_interfaceid": "ffc0c8e2-6ea5-4ef1-be5a-c062c7652b45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 587.026082] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-95da7399-39ac-4600-bcf2-fe87d046d58f req-1ad0428a-3350-424d-92f0-d9c278cc5d53 service nova] Expecting reply to msg edd366e2de394956bf16db72c163e79d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 587.033131] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg edd366e2de394956bf16db72c163e79d [ 587.033806] env[61649]: DEBUG oslo_concurrency.lockutils [req-95da7399-39ac-4600-bcf2-fe87d046d58f req-1ad0428a-3350-424d-92f0-d9c278cc5d53 service nova] Releasing lock "refresh_cache-fd0ac9db-adc2-46f2-93ff-0b7e299534a7" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 587.107902] env[61649]: DEBUG nova.network.neutron [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Instance cache missing network info. 
{{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 587.656375] env[61649]: DEBUG nova.network.neutron [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Updating instance_info_cache with network_info: [{"id": "ba529c28-ed7b-4228-96eb-a795fa6b4f90", "address": "fa:16:3e:2a:64:75", "network": {"id": "a3b98455-bec9-49f9-b508-a0338e3e56a1", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1274795432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a70e175710d40f1b889d65c5eaca043", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d88bb07-f93c-45ca-bce7-230cb1f33833", "external-id": "nsx-vlan-transportzone-387", "segmentation_id": 387, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba529c28-ed", "ovs_interfaceid": "ba529c28-ed7b-4228-96eb-a795fa6b4f90", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 587.656704] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Expecting reply to msg 22da868cc2204197b434319f42c37519 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 587.676116] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 22da868cc2204197b434319f42c37519 [ 587.677163] env[61649]: DEBUG oslo_concurrency.lockutils [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Releasing lock "refresh_cache-99f9912a-edf0-40f5-a7ce-55767081705b" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 587.677163] env[61649]: DEBUG nova.compute.manager [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Instance network_info: |[{"id": "ba529c28-ed7b-4228-96eb-a795fa6b4f90", "address": "fa:16:3e:2a:64:75", "network": {"id": "a3b98455-bec9-49f9-b508-a0338e3e56a1", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1274795432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a70e175710d40f1b889d65c5eaca043", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"2d88bb07-f93c-45ca-bce7-230cb1f33833", "external-id": "nsx-vlan-transportzone-387", "segmentation_id": 387, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba529c28-ed", "ovs_interfaceid": "ba529c28-ed7b-4228-96eb-a795fa6b4f90", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 587.677893] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2a:64:75', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2d88bb07-f93c-45ca-bce7-230cb1f33833', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ba529c28-ed7b-4228-96eb-a795fa6b4f90', 'vif_model': 'vmxnet3'}] {{(pid=61649) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 587.693860] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Creating folder: Project (5a70e175710d40f1b889d65c5eaca043). Parent ref: group-v51588. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 587.694526] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7c2de416-0884-4617-bb7e-ee17930983b7 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.705748] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Created folder: Project (5a70e175710d40f1b889d65c5eaca043) in parent group-v51588. [ 587.705950] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Creating folder: Instances. Parent ref: group-v51616. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 587.706257] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-00a5ea88-7d76-4561-a9a4-8bac9e1947cf {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.717151] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Created folder: Instances in parent group-v51616. [ 587.717480] env[61649]: DEBUG oslo.service.loopingcall [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 587.717673] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Creating VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 587.717870] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-325069cf-99ec-434f-a3bf-2dde96ce79c4 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.739872] env[61649]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 587.739872] env[61649]: value = "task-158112" [ 587.739872] env[61649]: _type = "Task" [ 587.739872] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.747862] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158112, 'name': CreateVM_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.162958] env[61649]: DEBUG oslo_concurrency.lockutils [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Acquiring lock "e5fe92cf-e150-419f-a164-a98a9d24dd8c" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 588.162958] env[61649]: DEBUG oslo_concurrency.lockutils [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Lock "e5fe92cf-e150-419f-a164-a98a9d24dd8c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 588.251256] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158112, 'name': CreateVM_Task, 'duration_secs': 0.296154} completed successfully.
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 588.251356] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Created VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 588.252077] env[61649]: DEBUG oslo_concurrency.lockutils [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 588.252429] env[61649]: DEBUG oslo_concurrency.lockutils [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 588.252567] env[61649]: DEBUG oslo_concurrency.lockutils [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 588.252825] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8eb636a-27ba-4fb7-807d-7934c9e4ff0c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.257287] env[61649]: DEBUG oslo_vmware.api [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Waiting for the task: (returnval){ [ 588.257287] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]5222bfe2-ca37-bf0d-94f9-e902658a3540" [ 588.257287] env[61649]: _type = "Task" [ 588.257287] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 588.265062] env[61649]: DEBUG oslo_vmware.api [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]5222bfe2-ca37-bf0d-94f9-e902658a3540, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.767650] env[61649]: DEBUG oslo_concurrency.lockutils [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 588.767650] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Processing image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 588.767650] env[61649]: DEBUG oslo_concurrency.lockutils [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 588.840133] env[61649]: DEBUG oslo_concurrency.lockutils [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Acquiring lock "95426048-d403-4dad-9ad7-b76de655a319" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 588.840313] env[61649]: DEBUG oslo_concurrency.lockutils [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Lock "95426048-d403-4dad-9ad7-b76de655a319" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 589.005129] env[61649]: DEBUG nova.compute.manager [req-17f184f2-2d18-4f38-b7ca-e763712130ba req-719515a4-2b9e-4b2f-b750-37b2a281ff0d service nova] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Received event network-vif-plugged-ba529c28-ed7b-4228-96eb-a795fa6b4f90 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 589.005341] env[61649]: DEBUG oslo_concurrency.lockutils [req-17f184f2-2d18-4f38-b7ca-e763712130ba req-719515a4-2b9e-4b2f-b750-37b2a281ff0d service nova] Acquiring lock "99f9912a-edf0-40f5-a7ce-55767081705b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 589.005537] env[61649]: DEBUG oslo_concurrency.lockutils [req-17f184f2-2d18-4f38-b7ca-e763712130ba req-719515a4-2b9e-4b2f-b750-37b2a281ff0d service nova] Lock "99f9912a-edf0-40f5-a7ce-55767081705b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 589.005700] env[61649]: DEBUG oslo_concurrency.lockutils
[req-17f184f2-2d18-4f38-b7ca-e763712130ba req-719515a4-2b9e-4b2f-b750-37b2a281ff0d service nova] Lock "99f9912a-edf0-40f5-a7ce-55767081705b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 589.005860] env[61649]: DEBUG nova.compute.manager [req-17f184f2-2d18-4f38-b7ca-e763712130ba req-719515a4-2b9e-4b2f-b750-37b2a281ff0d service nova] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] No waiting events found dispatching network-vif-plugged-ba529c28-ed7b-4228-96eb-a795fa6b4f90 {{(pid=61649) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 589.006020] env[61649]: WARNING nova.compute.manager [req-17f184f2-2d18-4f38-b7ca-e763712130ba req-719515a4-2b9e-4b2f-b750-37b2a281ff0d service nova] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Received unexpected event network-vif-plugged-ba529c28-ed7b-4228-96eb-a795fa6b4f90 for instance with vm_state building and task_state spawning. [ 589.006179] env[61649]: DEBUG nova.compute.manager [req-17f184f2-2d18-4f38-b7ca-e763712130ba req-719515a4-2b9e-4b2f-b750-37b2a281ff0d service nova] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Received event network-changed-ba529c28-ed7b-4228-96eb-a795fa6b4f90 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 589.006330] env[61649]: DEBUG nova.compute.manager [req-17f184f2-2d18-4f38-b7ca-e763712130ba req-719515a4-2b9e-4b2f-b750-37b2a281ff0d service nova] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Refreshing instance network info cache due to event network-changed-ba529c28-ed7b-4228-96eb-a795fa6b4f90. {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 589.006508] env[61649]: DEBUG oslo_concurrency.lockutils [req-17f184f2-2d18-4f38-b7ca-e763712130ba req-719515a4-2b9e-4b2f-b750-37b2a281ff0d service nova] Acquiring lock "refresh_cache-99f9912a-edf0-40f5-a7ce-55767081705b" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 589.006641] env[61649]: DEBUG oslo_concurrency.lockutils [req-17f184f2-2d18-4f38-b7ca-e763712130ba req-719515a4-2b9e-4b2f-b750-37b2a281ff0d service nova] Acquired lock "refresh_cache-99f9912a-edf0-40f5-a7ce-55767081705b" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 589.006846] env[61649]: DEBUG nova.network.neutron [req-17f184f2-2d18-4f38-b7ca-e763712130ba req-719515a4-2b9e-4b2f-b750-37b2a281ff0d service nova] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Refreshing network info cache for port ba529c28-ed7b-4228-96eb-a795fa6b4f90 {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 589.007269] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-17f184f2-2d18-4f38-b7ca-e763712130ba req-719515a4-2b9e-4b2f-b750-37b2a281ff0d service nova] Expecting reply to msg e5ad40ad8c4a4541bc987ac23a7bd68e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 589.015370] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e5ad40ad8c4a4541bc987ac23a7bd68e [ 589.530607] env[61649]: DEBUG nova.network.neutron [req-17f184f2-2d18-4f38-b7ca-e763712130ba req-719515a4-2b9e-4b2f-b750-37b2a281ff0d service nova] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Updated VIF entry in instance network info cache for port ba529c28-ed7b-4228-96eb-a795fa6b4f90.
{{(pid=61649) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 589.530991] env[61649]: DEBUG nova.network.neutron [req-17f184f2-2d18-4f38-b7ca-e763712130ba req-719515a4-2b9e-4b2f-b750-37b2a281ff0d service nova] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Updating instance_info_cache with network_info: [{"id": "ba529c28-ed7b-4228-96eb-a795fa6b4f90", "address": "fa:16:3e:2a:64:75", "network": {"id": "a3b98455-bec9-49f9-b508-a0338e3e56a1", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1274795432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a70e175710d40f1b889d65c5eaca043", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d88bb07-f93c-45ca-bce7-230cb1f33833", "external-id": "nsx-vlan-transportzone-387", "segmentation_id": 387, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba529c28-ed", "ovs_interfaceid": "ba529c28-ed7b-4228-96eb-a795fa6b4f90", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 589.531535] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-17f184f2-2d18-4f38-b7ca-e763712130ba req-719515a4-2b9e-4b2f-b750-37b2a281ff0d service nova] Expecting reply to msg ef3aa3fc6c064bd99aa259d7f446d5e0 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 589.546301] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ef3aa3fc6c064bd99aa259d7f446d5e0 [ 589.546301] env[61649]: DEBUG oslo_concurrency.lockutils [req-17f184f2-2d18-4f38-b7ca-e763712130ba req-719515a4-2b9e-4b2f-b750-37b2a281ff0d service nova] Releasing lock "refresh_cache-99f9912a-edf0-40f5-a7ce-55767081705b" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 589.565972] env[61649]: DEBUG nova.compute.manager [req-ad68cb87-b63c-4c59-9f7c-59b168b4e371 req-9c8d1667-52a9-4837-9727-dca49ef401a8 service nova] [instance: 00931111-13a1-447d-a401-943221badd59] Received event network-changed-06fece4f-69a0-4370-8c8f-4f7895e7f5f8 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 589.566223] env[61649]: DEBUG nova.compute.manager [req-ad68cb87-b63c-4c59-9f7c-59b168b4e371 req-9c8d1667-52a9-4837-9727-dca49ef401a8 service nova] [instance: 00931111-13a1-447d-a401-943221badd59] Refreshing instance network info cache due to event network-changed-06fece4f-69a0-4370-8c8f-4f7895e7f5f8. 
{{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 589.566471] env[61649]: DEBUG oslo_concurrency.lockutils [req-ad68cb87-b63c-4c59-9f7c-59b168b4e371 req-9c8d1667-52a9-4837-9727-dca49ef401a8 service nova] Acquiring lock "refresh_cache-00931111-13a1-447d-a401-943221badd59" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 589.566991] env[61649]: DEBUG oslo_concurrency.lockutils [req-ad68cb87-b63c-4c59-9f7c-59b168b4e371 req-9c8d1667-52a9-4837-9727-dca49ef401a8 service nova] Acquired lock "refresh_cache-00931111-13a1-447d-a401-943221badd59" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 589.566991] env[61649]: DEBUG nova.network.neutron [req-ad68cb87-b63c-4c59-9f7c-59b168b4e371 req-9c8d1667-52a9-4837-9727-dca49ef401a8 service nova] [instance: 00931111-13a1-447d-a401-943221badd59] Refreshing network info cache for port 06fece4f-69a0-4370-8c8f-4f7895e7f5f8 {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 589.567377] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-ad68cb87-b63c-4c59-9f7c-59b168b4e371 req-9c8d1667-52a9-4837-9727-dca49ef401a8 service nova] Expecting reply to msg 9f21564c665342eca441272fd286c5b7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 589.575122] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9f21564c665342eca441272fd286c5b7 [ 590.439467] env[61649]: DEBUG nova.network.neutron [req-ad68cb87-b63c-4c59-9f7c-59b168b4e371 req-9c8d1667-52a9-4837-9727-dca49ef401a8 service nova] [instance: 00931111-13a1-447d-a401-943221badd59] Updated VIF entry in instance network info cache for port 06fece4f-69a0-4370-8c8f-4f7895e7f5f8. 
{{(pid=61649) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 590.439816] env[61649]: DEBUG nova.network.neutron [req-ad68cb87-b63c-4c59-9f7c-59b168b4e371 req-9c8d1667-52a9-4837-9727-dca49ef401a8 service nova] [instance: 00931111-13a1-447d-a401-943221badd59] Updating instance_info_cache with network_info: [{"id": "06fece4f-69a0-4370-8c8f-4f7895e7f5f8", "address": "fa:16:3e:5a:7e:01", "network": {"id": "d523315a-b523-4024-9606-3990772ae627", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-616506589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b94780269444437fa1c7ff05a9723f42", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73915082-a1b0-460b-b24d-97588fc9cb29", "external-id": "nsx-vlan-transportzone-744", "segmentation_id": 744, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06fece4f-69", "ovs_interfaceid": "06fece4f-69a0-4370-8c8f-4f7895e7f5f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 590.440398] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-ad68cb87-b63c-4c59-9f7c-59b168b4e371 req-9c8d1667-52a9-4837-9727-dca49ef401a8 service nova] Expecting reply to msg ea6115b54a2c4d78b907f934d086ba48 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 590.449466] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ea6115b54a2c4d78b907f934d086ba48 [ 590.450100] env[61649]: DEBUG oslo_concurrency.lockutils [req-ad68cb87-b63c-4c59-9f7c-59b168b4e371 req-9c8d1667-52a9-4837-9727-dca49ef401a8 service nova] Releasing lock "refresh_cache-00931111-13a1-447d-a401-943221badd59" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 591.297866] env[61649]: DEBUG oslo_concurrency.lockutils [None req-aea03b43-01a8-4ff8-b0b4-77d5fd111c29 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Acquiring lock "adc73db6-8bff-4007-ae74-528a37840d96" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 591.298107] env[61649]: DEBUG oslo_concurrency.lockutils [None req-aea03b43-01a8-4ff8-b0b4-77d5fd111c29 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Lock "adc73db6-8bff-4007-ae74-528a37840d96" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 594.592410] env[61649]: DEBUG oslo_concurrency.lockutils [None req-09ed1a11-02fe-4825-b7cb-16cbc292b66b tempest-ServerActionsTestOtherA-266297113 tempest-ServerActionsTestOtherA-266297113-project-member] Acquiring lock "788fc9bb-2f88-4f82-88cf-9c7a002edb47" by
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 594.592655] env[61649]: DEBUG oslo_concurrency.lockutils [None req-09ed1a11-02fe-4825-b7cb-16cbc292b66b tempest-ServerActionsTestOtherA-266297113 tempest-ServerActionsTestOtherA-266297113-project-member] Lock "788fc9bb-2f88-4f82-88cf-9c7a002edb47" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 596.278681] env[61649]: DEBUG oslo_concurrency.lockutils [None req-60fa1d55-6048-4177-ab1c-ae2aedaeacc7 tempest-VolumesAssistedSnapshotsTest-1738453556 tempest-VolumesAssistedSnapshotsTest-1738453556-project-member] Acquiring lock "62d0a02d-88af-48f2-a14a-c9f2e899babe" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 596.278973] env[61649]: DEBUG oslo_concurrency.lockutils [None req-60fa1d55-6048-4177-ab1c-ae2aedaeacc7 tempest-VolumesAssistedSnapshotsTest-1738453556 tempest-VolumesAssistedSnapshotsTest-1738453556-project-member] Lock "62d0a02d-88af-48f2-a14a-c9f2e899babe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 597.773544] env[61649]: DEBUG oslo_concurrency.lockutils [None req-b159c52b-1dd5-4d85-808e-44e3a408759e tempest-SecurityGroupsTestJSON-1426961127 tempest-SecurityGroupsTestJSON-1426961127-project-member] Acquiring lock "c7fac9e7-0802-4f2e-a577-4ee50efa835a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 597.773863] env[61649]: DEBUG oslo_concurrency.lockutils [None req-b159c52b-1dd5-4d85-808e-44e3a408759e tempest-SecurityGroupsTestJSON-1426961127 tempest-SecurityGroupsTestJSON-1426961127-project-member] Lock "c7fac9e7-0802-4f2e-a577-4ee50efa835a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 601.536448] env[61649]: DEBUG oslo_concurrency.lockutils [None req-006cb8b0-9585-43e4-b3a3-d67269f24d31 tempest-AttachInterfacesV270Test-288835143 tempest-AttachInterfacesV270Test-288835143-project-member] Acquiring lock "b9315087-b61c-488c-aaa9-5f4b4e2f12b4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 601.536771] env[61649]: DEBUG oslo_concurrency.lockutils [None req-006cb8b0-9585-43e4-b3a3-d67269f24d31 tempest-AttachInterfacesV270Test-288835143 tempest-AttachInterfacesV270Test-288835143-project-member] Lock "b9315087-b61c-488c-aaa9-5f4b4e2f12b4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
604.356574] env[61649]: DEBUG oslo_concurrency.lockutils [None req-de234925-3cc8-45f6-baae-1684c0bdf5b6 tempest-MultipleCreateTestJSON-674370190 tempest-MultipleCreateTestJSON-674370190-project-member] Acquiring lock "3889663a-53e8-4d3e-bed6-5e86519522ef" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 604.356574] env[61649]: DEBUG oslo_concurrency.lockutils [None req-de234925-3cc8-45f6-baae-1684c0bdf5b6 tempest-MultipleCreateTestJSON-674370190 tempest-MultipleCreateTestJSON-674370190-project-member] Lock "3889663a-53e8-4d3e-bed6-5e86519522ef" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 604.388543] env[61649]: DEBUG oslo_concurrency.lockutils [None req-de234925-3cc8-45f6-baae-1684c0bdf5b6 tempest-MultipleCreateTestJSON-674370190 tempest-MultipleCreateTestJSON-674370190-project-member] Acquiring lock "833dbc2a-a434-4ca1-aa33-b48a910c0e91" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 604.388761] env[61649]: DEBUG oslo_concurrency.lockutils [None req-de234925-3cc8-45f6-baae-1684c0bdf5b6 tempest-MultipleCreateTestJSON-674370190 tempest-MultipleCreateTestJSON-674370190-project-member] Lock "833dbc2a-a434-4ca1-aa33-b48a910c0e91" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 605.536597] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d225dccc-3167-4f15-b2ee-e87bf6351be7 tempest-ServerRescueNegativeTestJSON-1074592202 tempest-ServerRescueNegativeTestJSON-1074592202-project-member] Acquiring lock "8819ddfb-4286-455b-8216-05e89424183e" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 605.536928] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d225dccc-3167-4f15-b2ee-e87bf6351be7 tempest-ServerRescueNegativeTestJSON-1074592202 tempest-ServerRescueNegativeTestJSON-1074592202-project-member] Lock "8819ddfb-4286-455b-8216-05e89424183e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 606.616088] env[61649]: DEBUG oslo_concurrency.lockutils [None req-aa883485-a69e-40d7-ba40-33bfb1fcbd53 tempest-ListServerFiltersTestJSON-1824319426 tempest-ListServerFiltersTestJSON-1824319426-project-member] Acquiring lock "fb407af6-66cb-4b3d-b630-d2b5a4b2c8d8" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 606.616360] env[61649]: DEBUG oslo_concurrency.lockutils [None req-aa883485-a69e-40d7-ba40-33bfb1fcbd53 tempest-ListServerFiltersTestJSON-1824319426 tempest-ListServerFiltersTestJSON-1824319426-project-member] Lock
"fb407af6-66cb-4b3d-b630-d2b5a4b2c8d8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 608.927129] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c8264bde-bd22-40ec-b19d-68c990b8317f tempest-AttachInterfacesUnderV243Test-1128518305 tempest-AttachInterfacesUnderV243Test-1128518305-project-member] Acquiring lock "fc905ad2-7f1d-4356-a8f7-1eda98cdd01d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 608.927494] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c8264bde-bd22-40ec-b19d-68c990b8317f tempest-AttachInterfacesUnderV243Test-1128518305 tempest-AttachInterfacesUnderV243Test-1128518305-project-member] Lock "fc905ad2-7f1d-4356-a8f7-1eda98cdd01d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 609.388194] env[61649]: DEBUG oslo_concurrency.lockutils [None req-74f1ff70-60f1-43da-a311-3a5f92da5ea5 tempest-ListServerFiltersTestJSON-1824319426 tempest-ListServerFiltersTestJSON-1824319426-project-member] Acquiring lock "4868f1fe-04d3-4055-bf61-8a46723cf573" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 609.388443] env[61649]: DEBUG oslo_concurrency.lockutils [None req-74f1ff70-60f1-43da-a311-3a5f92da5ea5 tempest-ListServerFiltersTestJSON-1824319426 tempest-ListServerFiltersTestJSON-1824319426-project-member] Lock "4868f1fe-04d3-4055-bf61-8a46723cf573" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 609.882108] env[61649]: DEBUG oslo_concurrency.lockutils [None req-2e964edf-0182-4c2a-a9c0-4889d6336187 tempest-ServerRescueNegativeTestJSON-1074592202 tempest-ServerRescueNegativeTestJSON-1074592202-project-member] Acquiring lock "fffdaf35-4e17-40ba-95a7-cf34fa04737e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 609.882228] env[61649]: DEBUG oslo_concurrency.lockutils [None req-2e964edf-0182-4c2a-a9c0-4889d6336187 tempest-ServerRescueNegativeTestJSON-1074592202 tempest-ServerRescueNegativeTestJSON-1074592202-project-member] Lock "fffdaf35-4e17-40ba-95a7-cf34fa04737e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 610.647397] env[61649]: WARNING oslo_vmware.rw_handles [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 610.647397] env[61649]: ERROR oslo_vmware.rw_handles 
Traceback (most recent call last): [ 610.647397] env[61649]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 610.647397] env[61649]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 610.647397] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 610.647397] env[61649]: ERROR oslo_vmware.rw_handles response.begin() [ 610.647397] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 610.647397] env[61649]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 610.647397] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 610.647397] env[61649]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 610.647397] env[61649]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 610.647397] env[61649]: ERROR oslo_vmware.rw_handles [ 610.647991] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] Downloaded image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to vmware_temp/e498a504-68df-4b14-aba3-6b12d086305b/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 610.649041] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] Caching image {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 610.649331] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Copying Virtual Disk [datastore1] vmware_temp/e498a504-68df-4b14-aba3-6b12d086305b/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk to [datastore1] vmware_temp/e498a504-68df-4b14-aba3-6b12d086305b/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk {{(pid=61649) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 610.649609] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-91cd9233-8402-4286-9b08-1c5ff1c7b688 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.660582] env[61649]: DEBUG oslo_vmware.api [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Waiting for the task: (returnval){ [ 610.660582] env[61649]: value = "task-158113" [ 610.660582] env[61649]: _type = "Task" [ 610.660582] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.665934] env[61649]: DEBUG oslo_vmware.api [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Task: {'id': task-158113, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.168340] env[61649]: DEBUG oslo_vmware.exceptions [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Fault InvalidArgument not matched. {{(pid=61649) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 611.168609] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 611.171969] env[61649]: ERROR nova.compute.manager [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 611.171969] env[61649]: Faults: ['InvalidArgument'] [ 611.171969] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] Traceback (most recent call last): [ 611.171969] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 611.171969] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] yield resources [ 611.171969] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 611.171969] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] self.driver.spawn(context, instance, image_meta, [ 611.171969] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 611.171969] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] self._vmops.spawn(context, instance, image_meta, injected_files, [ 611.171969] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 611.171969] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] self._fetch_image_if_missing(context, vi) [ 611.171969] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 611.172434] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] image_cache(vi, tmp_image_ds_loc) [ 611.172434] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 611.172434] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] vm_util.copy_virtual_disk( [ 611.172434] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in 
copy_virtual_disk [ 611.172434] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] session._wait_for_task(vmdk_copy_task) [ 611.172434] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 611.172434] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] return self.wait_for_task(task_ref) [ 611.172434] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 611.172434] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] return evt.wait() [ 611.172434] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 611.172434] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] result = hub.switch() [ 611.172434] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 611.172434] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] return self.greenlet.switch() [ 611.172844] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 611.172844] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] self.f(*self.args, **self.kw) [ 611.172844] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 611.172844] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] raise exceptions.translate_fault(task_info.error) [ 611.172844] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 611.172844] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] Faults: ['InvalidArgument'] [ 611.172844] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] [ 611.172844] env[61649]: INFO nova.compute.manager [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] Terminating instance [ 611.173919] env[61649]: DEBUG oslo_concurrency.lockutils [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 611.174128] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Creating directory with path [datastore1] 
devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 611.174640] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Acquiring lock "refresh_cache-683996e1-4e16-4add-8fa6-3c2843ebbf21" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 611.174793] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Acquired lock "refresh_cache-683996e1-4e16-4add-8fa6-3c2843ebbf21" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 611.174954] env[61649]: DEBUG nova.network.neutron [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] Building network info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 611.175388] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Expecting reply to msg 220c00d392a8430d889b2e41be89ee05 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 611.176135] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1d0df832-70a7-4d03-94fe-2eeff3c9bb42 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.185871] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 611.186061] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61649) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 611.187096] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 220c00d392a8430d889b2e41be89ee05 [ 611.187487] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab471a46-0f7f-4567-bed7-3fb9a9812851 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.196644] env[61649]: DEBUG oslo_vmware.api [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Waiting for the task: (returnval){ [ 611.196644] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52647fa8-5e35-f4cb-fa58-94522466fa0f" [ 611.196644] env[61649]: _type = "Task" [ 611.196644] env[61649]: } to complete. 
{{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.206217] env[61649]: DEBUG oslo_vmware.api [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52647fa8-5e35-f4cb-fa58-94522466fa0f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.210623] env[61649]: DEBUG nova.network.neutron [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] Instance cache missing network info. {{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 611.316090] env[61649]: DEBUG nova.network.neutron [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 611.316643] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Expecting reply to msg c6d52f03ff4047fe93eba9c3b037ea9f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 611.324982] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c6d52f03ff4047fe93eba9c3b037ea9f [ 611.325506] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Releasing lock "refresh_cache-683996e1-4e16-4add-8fa6-3c2843ebbf21" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 611.325912] env[61649]: DEBUG nova.compute.manager [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] Start destroying the instance on the hypervisor. 
{{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 611.326100] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 611.327165] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc95048f-7bb9-49d2-bac9-d0d32f59dba6 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.335052] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] Unregistering the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 611.335291] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a7091948-dea2-4002-b9e6-38159cbd0068 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.373978] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] Unregistered the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 611.374184] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] Deleting contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 611.374357] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Deleting the datastore file [datastore1] 683996e1-4e16-4add-8fa6-3c2843ebbf21 {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 611.374608] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a305503b-9976-4e00-b252-c4251ccb4940 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.380586] env[61649]: DEBUG oslo_vmware.api [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Waiting for the task: (returnval){ [ 611.380586] env[61649]: value = "task-158115" [ 611.380586] env[61649]: _type = "Task" [ 611.380586] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.388224] env[61649]: DEBUG oslo_vmware.api [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Task: {'id': task-158115, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.504135] env[61649]: DEBUG oslo_concurrency.lockutils [None req-0e74fbc9-1569-444e-8bd5-44603fbe0080 tempest-ListServerFiltersTestJSON-1824319426 tempest-ListServerFiltersTestJSON-1824319426-project-member] Acquiring lock "bf8a0b66-22ef-4f1e-99a3-9727d4a61c02" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 611.504135] env[61649]: DEBUG oslo_concurrency.lockutils [None req-0e74fbc9-1569-444e-8bd5-44603fbe0080 tempest-ListServerFiltersTestJSON-1824319426 tempest-ListServerFiltersTestJSON-1824319426-project-member] Lock "bf8a0b66-22ef-4f1e-99a3-9727d4a61c02" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 611.706389] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] Preparing fetch location {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 611.706773] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Creating directory with path [datastore1] vmware_temp/db963ba9-f4bd-447c-96ab-15902146dbc1/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 611.707023] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b76e7529-17c3-45b7-8b1e-a7433a3b309f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.719273] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Created directory with path [datastore1] vmware_temp/db963ba9-f4bd-447c-96ab-15902146dbc1/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 611.719484] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] Fetch image to [datastore1] vmware_temp/db963ba9-f4bd-447c-96ab-15902146dbc1/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 611.719667] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to [datastore1] vmware_temp/db963ba9-f4bd-447c-96ab-15902146dbc1/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
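
The records above trace the driver's fetch-if-missing image flow for instance b6e68fe1: serialize on the cached image path, re-check the cache under the lock, stage the download into a unique vmware_temp/ directory, and only then publish it into devstack-image-cache_base. A minimal sketch of that pattern, using oslo.concurrency for the lock as the log does; the cache_exists/download/publish callables are illustrative placeholders standing in for Nova's datastore helpers, not its actual API:

    import uuid

    from oslo_concurrency import lockutils

    def fetch_image_if_missing(image_id, cache_dir, cache_exists, download, publish):
        # Serialize concurrent requests for the same image, mirroring the
        # 'Acquiring lock "[datastore1] devstack-image-cache_base/..."' records.
        with lockutils.lock('%s/%s' % (cache_dir, image_id)):
            if cache_exists(image_id):
                return  # a concurrent request already populated the cache
            # Stage into a unique temp path, as in vmware_temp/<uuid>/<image-id>/.
            tmp_path = 'vmware_temp/%s/%s/tmp-sparse.vmdk' % (uuid.uuid4(), image_id)
            download(image_id, tmp_path)  # HTTP write to the datastore
            publish(tmp_path, image_id)   # move the staged file into the cache

Staging under a per-request directory keeps a half-written vmdk out of the shared cache path if a transfer is interrupted, which matters given the RemoteDisconnected error logged for the earlier download above.
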
[ 611.720925] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-294c71ee-b6cf-40f2-9637-f30ada16b913 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.730701] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-894123c8-3d31-4ac6-a60e-ccd4c0e147b2 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.741334] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bc8da76-0ba5-4f74-8ed9-e0e47cc69807 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.777175] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea804fc0-8f70-40a5-88d2-38aa78de1fbb {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.784929] env[61649]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-9910dec3-6965-40a9-8a7f-8fa22ef125b3 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.824040] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 611.892381] env[61649]: DEBUG oslo_vmware.api [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Task: {'id': task-158115, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.033552} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.892381] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Deleted the datastore file {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 611.892381] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] Deleted contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 611.892381] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 611.892381] env[61649]: INFO nova.compute.manager [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] Took 0.57 seconds to destroy the instance on the hypervisor. 
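
Most of the surrounding bookkeeping ("Waiting for the task", "progress is 0%.", "completed successfully") is oslo.vmware's task polling. A rough, simplified sketch of that invoke-then-poll pattern follows; the host, credentials and datacenter reference are placeholders, and this is an illustration rather than Nova's exact call sequence:

    from oslo_vmware import api

    # Placeholder endpoint and credentials; constructing the session
    # authenticates against vCenter.
    session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                   api_retry_count=3, task_poll_interval=0.5)

    file_manager = session.vim.service_content.fileManager
    datacenter_ref = None  # placeholder; a real datacenter moref in practice
    # vSphere methods ending in _Task return a Task moref immediately...
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager,
                              name='[datastore1] 683996e1-4e16-4add-8fa6-3c2843ebbf21',
                              datacenter=datacenter_ref)
    # ...and wait_for_task() polls TaskInfo until it completes (the progress /
    # duration_secs lines) or raises a translated fault, the same path that
    # surfaced the InvalidArgument 'fileType' error for task-158113 earlier.
    session.wait_for_task(task)
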
[ 611.892649] env[61649]: DEBUG oslo.service.loopingcall [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 611.892906] env[61649]: DEBUG oslo_vmware.rw_handles [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/db963ba9-f4bd-447c-96ab-15902146dbc1/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 611.894231] env[61649]: DEBUG nova.compute.manager [-] [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] Skipping network deallocation for instance since networking was not requested. {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 611.896651] env[61649]: DEBUG nova.compute.claims [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] Aborting claim: {{(pid=61649) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 611.896884] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 611.897169] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 611.899134] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Expecting reply to msg d29712f0870a491db1164411fbeaabeb in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 611.956775] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d29712f0870a491db1164411fbeaabeb [ 611.972115] env[61649]: DEBUG oslo_vmware.rw_handles [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Completed reading data from the image iterator. 
{{(pid=61649) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 611.972334] env[61649]: DEBUG oslo_vmware.rw_handles [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/db963ba9-f4bd-447c-96ab-15902146dbc1/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 612.353846] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1db4d14b-87cd-49a0-b9cf-af93d6ae7741 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.361240] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8479366-0407-4937-b438-21dffe2aef03 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.402684] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfddd154-b03d-4e50-8713-d46f3c71d437 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.411623] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd45e740-76d4-412e-bf53-aa83aca0254f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.425347] env[61649]: DEBUG nova.compute.provider_tree [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 612.426054] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Expecting reply to msg 12368f1d3b2249e0b8a1a9104c876228 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 612.433690] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 12368f1d3b2249e0b8a1a9104c876228 [ 612.434607] env[61649]: DEBUG nova.scheduler.client.report [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 612.436982] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Expecting reply to msg 43a8bc0dfb2141acb7dd7fa286326b4e 
in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 612.448722] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 43a8bc0dfb2141acb7dd7fa286326b4e
[ 612.449466] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.552s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 612.449970] env[61649]: ERROR nova.compute.manager [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 612.449970] env[61649]: Faults: ['InvalidArgument']
[ 612.449970] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] Traceback (most recent call last):
[ 612.449970] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21]   File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 612.449970] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21]     self.driver.spawn(context, instance, image_meta,
[ 612.449970] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 612.449970] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 612.449970] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 612.449970] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21]     self._fetch_image_if_missing(context, vi)
[ 612.449970] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 612.449970] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21]     image_cache(vi, tmp_image_ds_loc)
[ 612.449970] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 612.450322] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21]     vm_util.copy_virtual_disk(
[ 612.450322] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21]   File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 612.450322] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21]     session._wait_for_task(vmdk_copy_task)
[ 612.450322] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21]   File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 612.450322] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21]     return self.wait_for_task(task_ref)
[ 612.450322] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 612.450322] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21]     return evt.wait()
[ 612.450322] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 612.450322] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21]     result = hub.switch()
[ 612.450322] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 612.450322] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21]     return self.greenlet.switch()
[ 612.450322] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 612.450322] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21]     self.f(*self.args, **self.kw)
[ 612.450647] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 612.450647] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21]     raise exceptions.translate_fault(task_info.error)
[ 612.450647] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 612.450647] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] Faults: ['InvalidArgument']
[ 612.450647] env[61649]: ERROR nova.compute.manager [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21]
[ 612.450781] env[61649]: DEBUG nova.compute.utils [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] VimFaultException {{(pid=61649) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 612.453331] env[61649]: DEBUG nova.compute.manager [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] Build of instance 683996e1-4e16-4add-8fa6-3c2843ebbf21 was re-scheduled: A specified parameter was not correct: fileType
[ 612.453331] env[61649]: Faults: ['InvalidArgument'] {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 612.453720] env[61649]: DEBUG nova.compute.manager [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] Unplugging VIFs for instance {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 612.453951] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Acquiring lock "refresh_cache-683996e1-4e16-4add-8fa6-3c2843ebbf21" {{(pid=61649) lock
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 612.454097] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Acquired lock "refresh_cache-683996e1-4e16-4add-8fa6-3c2843ebbf21" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 612.454255] env[61649]: DEBUG nova.network.neutron [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] Building network info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 612.454631] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Expecting reply to msg 27fa702d971445efb29a2db7e7af3a6a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 612.461644] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 27fa702d971445efb29a2db7e7af3a6a [ 612.493462] env[61649]: DEBUG nova.network.neutron [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] Instance cache missing network info. {{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 612.728693] env[61649]: DEBUG nova.network.neutron [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 612.729250] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Expecting reply to msg ed8db422351b4b62a68d9151c6efb05c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 612.737448] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ed8db422351b4b62a68d9151c6efb05c [ 612.738022] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Releasing lock "refresh_cache-683996e1-4e16-4add-8fa6-3c2843ebbf21" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 612.738240] env[61649]: DEBUG nova.compute.manager [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 612.738431] env[61649]: DEBUG nova.compute.manager [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] [instance: 683996e1-4e16-4add-8fa6-3c2843ebbf21] Skipping network deallocation for instance since networking was not requested. {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 612.741453] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Expecting reply to msg c6440d16af044559b7f3c8a4ab11da25 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 612.772603] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c6440d16af044559b7f3c8a4ab11da25 [ 612.774837] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Expecting reply to msg b7ca9dc4547f4885bcdcb702cd737e9e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 612.829601] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b7ca9dc4547f4885bcdcb702cd737e9e [ 612.839627] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9642a302-e440-4176-ba64-f571ed6432e4 tempest-InstanceActionsTestJSON-841455593 tempest-InstanceActionsTestJSON-841455593-project-member] Acquiring lock "6126223b-c712-4260-a49b-7a56c4035e75" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 612.839849] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9642a302-e440-4176-ba64-f571ed6432e4 tempest-InstanceActionsTestJSON-841455593 tempest-InstanceActionsTestJSON-841455593-project-member] Lock "6126223b-c712-4260-a49b-7a56c4035e75" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 612.861711] env[61649]: INFO nova.scheduler.client.report [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Deleted allocations for instance 683996e1-4e16-4add-8fa6-3c2843ebbf21 [ 612.867557] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Expecting reply to msg ae23cbeed68648fbac63177f0f3ceb16 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 612.880991] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ae23cbeed68648fbac63177f0f3ceb16 [ 612.881610] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3fef2cb9-f777-41a8-93be-17ea526fb456 tempest-ServersAdmin275Test-1307627859 tempest-ServersAdmin275Test-1307627859-project-member] Lock "683996e1-4e16-4add-8fa6-3c2843ebbf21" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 54.937s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 612.882160] env[61649]: INFO 
oslo_messaging._drivers.amqpdriver [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Expecting reply to msg 908ab5086a7541e090faa650aa385100 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 612.918500] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 908ab5086a7541e090faa650aa385100 [ 612.919026] env[61649]: DEBUG nova.compute.manager [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 612.921237] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Expecting reply to msg a845634da57e43a1a6fd66584f4d4485 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 612.962424] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a845634da57e43a1a6fd66584f4d4485 [ 612.975684] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 612.975925] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 612.977728] env[61649]: INFO nova.compute.claims [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 612.979990] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Expecting reply to msg 43da0fe5383941fabea8bd852b832ffa in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 613.016625] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 43da0fe5383941fabea8bd852b832ffa [ 613.018467] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Expecting reply to msg f35d655460ff448e92b123923f29ed5c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 613.026058] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f35d655460ff448e92b123923f29ed5c [ 613.417871] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b48ce3bf-337a-404a-8a28-9167f7d57936 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.425844] env[61649]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e4cc269-7cfb-432d-a88f-ed6a1064997c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.454653] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8399ccb9-b24b-4d30-b70d-1adccf66b607 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.461671] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d791892-39cb-4535-a32c-22c973f16bfc {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.475400] env[61649]: DEBUG nova.compute.provider_tree [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 613.477391] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Expecting reply to msg 071d1f73041a4d2a95c3e5837ef491cf in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 613.484409] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 071d1f73041a4d2a95c3e5837ef491cf [ 613.485333] env[61649]: DEBUG nova.scheduler.client.report [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 613.487540] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Expecting reply to msg 367d78eb17c1429c837efb17fa01a5c8 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 613.497874] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 367d78eb17c1429c837efb17fa01a5c8 [ 613.498562] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.523s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 613.499097] env[61649]: DEBUG nova.compute.manager [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Start building networks asynchronously for instance. 
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 613.500866] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Expecting reply to msg 963cd34c45cf44ffb8892f489f810595 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 613.530055] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 963cd34c45cf44ffb8892f489f810595 [ 613.532123] env[61649]: DEBUG nova.compute.utils [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Using /dev/sd instead of None {{(pid=61649) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 613.532769] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Expecting reply to msg 7103778c48954cad97d0c781fc1c821e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 613.534368] env[61649]: DEBUG nova.compute.manager [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Allocating IP information in the background. {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 613.534917] env[61649]: DEBUG nova.network.neutron [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] allocate_for_instance() {{(pid=61649) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 613.550252] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7103778c48954cad97d0c781fc1c821e [ 613.550920] env[61649]: DEBUG nova.compute.manager [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Start building block device mappings for instance. 
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 613.553041] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Expecting reply to msg 53139594c0724393ad532ccfc5a3d84b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 613.585147] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 53139594c0724393ad532ccfc5a3d84b [ 613.587892] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Expecting reply to msg 012daff361bf4c36871c56d318105aa9 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 613.605541] env[61649]: DEBUG nova.policy [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1794bd0a021c40cbb2ac1a88359c88db', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f2b0ea45199c4025a6d3ecd769b154ac', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61649) authorize /opt/stack/nova/nova/policy.py:203}} [ 613.632106] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 012daff361bf4c36871c56d318105aa9 [ 613.633337] env[61649]: DEBUG nova.compute.manager [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Start spawning the instance on the hypervisor. 
{{(pid=61649) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 613.654435] env[61649]: DEBUG nova.virt.hardware [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-04-02T10:03:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-04-02T10:03:42Z,direct_url=,disk_format='vmdk',id=d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d9c1bd4c77004c3cb8e42232cad1896c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-04-02T10:03:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 613.654606] env[61649]: DEBUG nova.virt.hardware [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Flavor limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 613.654788] env[61649]: DEBUG nova.virt.hardware [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Image limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 613.654931] env[61649]: DEBUG nova.virt.hardware [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Flavor pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 613.655078] env[61649]: DEBUG nova.virt.hardware [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Image pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 613.655219] env[61649]: DEBUG nova.virt.hardware [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 613.655417] env[61649]: DEBUG nova.virt.hardware [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 613.655571] env[61649]: DEBUG nova.virt.hardware [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 613.655730] env[61649]: DEBUG nova.virt.hardware [None 
req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Got 1 possible topologies {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 613.655888] env[61649]: DEBUG nova.virt.hardware [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 613.656070] env[61649]: DEBUG nova.virt.hardware [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 613.656911] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22aa669f-9234-410d-8659-212139ac9ba2 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.665129] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-796700d5-831e-4c4a-af2b-29eb7394d0e7 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.277770] env[61649]: DEBUG nova.network.neutron [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Successfully created port: 80e67951-2c3d-47b5-8138-95d2a17fa1e2 {{(pid=61649) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 615.976296] env[61649]: DEBUG nova.network.neutron [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Successfully updated port: 80e67951-2c3d-47b5-8138-95d2a17fa1e2 {{(pid=61649) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 615.976296] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Expecting reply to msg 356c5709173c417aa363479ceff6dfb1 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 615.989042] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 356c5709173c417aa363479ceff6dfb1 [ 615.989610] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Acquiring lock "refresh_cache-7f9f2074-6822-4d9d-9791-4bebc7e55862" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 615.989737] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Acquired lock "refresh_cache-7f9f2074-6822-4d9d-9791-4bebc7e55862" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 615.989890] env[61649]: DEBUG nova.network.neutron [None 
req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Building network info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 615.990355] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Expecting reply to msg c8fc47d7e6d845a7a6de937e470de230 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 616.000540] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c8fc47d7e6d845a7a6de937e470de230 [ 616.086765] env[61649]: DEBUG nova.network.neutron [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Instance cache missing network info. {{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 616.580310] env[61649]: DEBUG nova.network.neutron [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Updating instance_info_cache with network_info: [{"id": "80e67951-2c3d-47b5-8138-95d2a17fa1e2", "address": "fa:16:3e:69:3f:a6", "network": {"id": "c929dfa8-bd36-48c6-bfd9-71ab510986bb", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-822183821-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f2b0ea45199c4025a6d3ecd769b154ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "101a44fc-ffde-4e3e-ad82-363454ae458b", "external-id": "nsx-vlan-transportzone-723", "segmentation_id": 723, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap80e67951-2c", "ovs_interfaceid": "80e67951-2c3d-47b5-8138-95d2a17fa1e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 616.581540] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Expecting reply to msg f391a33f698c4a73bd2b9ae6149e3f97 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 616.597094] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f391a33f698c4a73bd2b9ae6149e3f97 [ 616.597766] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Releasing lock "refresh_cache-7f9f2074-6822-4d9d-9791-4bebc7e55862" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 616.598126] env[61649]: DEBUG 
nova.compute.manager [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Instance network_info: |[{"id": "80e67951-2c3d-47b5-8138-95d2a17fa1e2", "address": "fa:16:3e:69:3f:a6", "network": {"id": "c929dfa8-bd36-48c6-bfd9-71ab510986bb", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-822183821-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f2b0ea45199c4025a6d3ecd769b154ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "101a44fc-ffde-4e3e-ad82-363454ae458b", "external-id": "nsx-vlan-transportzone-723", "segmentation_id": 723, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap80e67951-2c", "ovs_interfaceid": "80e67951-2c3d-47b5-8138-95d2a17fa1e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 616.598454] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:69:3f:a6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '101a44fc-ffde-4e3e-ad82-363454ae458b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '80e67951-2c3d-47b5-8138-95d2a17fa1e2', 'vif_model': 'vmxnet3'}] {{(pid=61649) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 616.605940] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Creating folder: Project (f2b0ea45199c4025a6d3ecd769b154ac). Parent ref: group-v51588. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 616.606492] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c5b4c226-8c66-4d91-b37b-077f2c30a5b5 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.618912] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Created folder: Project (f2b0ea45199c4025a6d3ecd769b154ac) in parent group-v51588. [ 616.619249] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Creating folder: Instances. Parent ref: group-v51619. 
{{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 616.619493] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2b769242-2662-4ba6-b639-d497167a85f4 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 616.627947] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Created folder: Instances in parent group-v51619.
[ 616.628259] env[61649]: DEBUG oslo.service.loopingcall [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 616.628449] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Creating VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 616.628638] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4e2a160d-d287-41a3-8500-545f7cff5514 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 616.647750] env[61649]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 616.647750] env[61649]: value = "task-158118"
[ 616.647750] env[61649]: _type = "Task"
[ 616.647750] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 616.665135] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158118, 'name': CreateVM_Task} progress is 6%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 617.079823] env[61649]: DEBUG nova.compute.manager [req-0b32b314-df99-4351-8a99-74f12325524d req-4c20c12d-2d6b-4e20-b0be-d734c36701a7 service nova] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Received event network-vif-plugged-80e67951-2c3d-47b5-8138-95d2a17fa1e2 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}}
[ 617.079823] env[61649]: DEBUG oslo_concurrency.lockutils [req-0b32b314-df99-4351-8a99-74f12325524d req-4c20c12d-2d6b-4e20-b0be-d734c36701a7 service nova] Acquiring lock "7f9f2074-6822-4d9d-9791-4bebc7e55862-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 617.081740] env[61649]: DEBUG oslo_concurrency.lockutils [req-0b32b314-df99-4351-8a99-74f12325524d req-4c20c12d-2d6b-4e20-b0be-d734c36701a7 service nova] Lock "7f9f2074-6822-4d9d-9791-4bebc7e55862-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 617.081740] env[61649]: DEBUG oslo_concurrency.lockutils [req-0b32b314-df99-4351-8a99-74f12325524d req-4c20c12d-2d6b-4e20-b0be-d734c36701a7 service nova] Lock "7f9f2074-6822-4d9d-9791-4bebc7e55862-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 617.081740] env[61649]: DEBUG nova.compute.manager [req-0b32b314-df99-4351-8a99-74f12325524d req-4c20c12d-2d6b-4e20-b0be-d734c36701a7 service nova] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] No waiting events found dispatching network-vif-plugged-80e67951-2c3d-47b5-8138-95d2a17fa1e2 {{(pid=61649) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 617.081740] env[61649]: WARNING nova.compute.manager [req-0b32b314-df99-4351-8a99-74f12325524d req-4c20c12d-2d6b-4e20-b0be-d734c36701a7 service nova] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Received unexpected event network-vif-plugged-80e67951-2c3d-47b5-8138-95d2a17fa1e2 for instance with vm_state building and task_state spawning.
[ 617.161592] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158118, 'name': CreateVM_Task, 'duration_secs': 0.325982} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 617.162502] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Created VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 617.163483] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 617.163815] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 617.164297] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 617.164733] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e244350e-f19c-4c36-9801-abda654bbe0c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 617.176305] env[61649]: DEBUG oslo_vmware.api [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Waiting for the task: (returnval){
[ 617.176305] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]5294b158-4d67-cc9e-f10e-c8bcbabe7609"
[ 617.176305] env[61649]: _type = "Task"
[ 617.176305] env[61649]: } to complete.
{{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.192936] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 617.193334] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Processing image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 617.193672] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 619.953061] env[61649]: DEBUG nova.compute.manager [req-5d632ab7-4f5c-4148-9957-48f29c4e2285 req-74b4dc89-18d8-4fd7-a323-fcf0d0ab0e60 service nova] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Received event network-changed-80e67951-2c3d-47b5-8138-95d2a17fa1e2 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 619.953304] env[61649]: DEBUG nova.compute.manager [req-5d632ab7-4f5c-4148-9957-48f29c4e2285 req-74b4dc89-18d8-4fd7-a323-fcf0d0ab0e60 service nova] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Refreshing instance network info cache due to event network-changed-80e67951-2c3d-47b5-8138-95d2a17fa1e2. 
{{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 619.953468] env[61649]: DEBUG oslo_concurrency.lockutils [req-5d632ab7-4f5c-4148-9957-48f29c4e2285 req-74b4dc89-18d8-4fd7-a323-fcf0d0ab0e60 service nova] Acquiring lock "refresh_cache-7f9f2074-6822-4d9d-9791-4bebc7e55862" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 619.953608] env[61649]: DEBUG oslo_concurrency.lockutils [req-5d632ab7-4f5c-4148-9957-48f29c4e2285 req-74b4dc89-18d8-4fd7-a323-fcf0d0ab0e60 service nova] Acquired lock "refresh_cache-7f9f2074-6822-4d9d-9791-4bebc7e55862" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 619.953764] env[61649]: DEBUG nova.network.neutron [req-5d632ab7-4f5c-4148-9957-48f29c4e2285 req-74b4dc89-18d8-4fd7-a323-fcf0d0ab0e60 service nova] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Refreshing network info cache for port 80e67951-2c3d-47b5-8138-95d2a17fa1e2 {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 619.954310] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-5d632ab7-4f5c-4148-9957-48f29c4e2285 req-74b4dc89-18d8-4fd7-a323-fcf0d0ab0e60 service nova] Expecting reply to msg fcee75df0d234b22a560b70d15c3dce1 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 619.962256] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fcee75df0d234b22a560b70d15c3dce1 [ 620.555881] env[61649]: DEBUG nova.network.neutron [req-5d632ab7-4f5c-4148-9957-48f29c4e2285 req-74b4dc89-18d8-4fd7-a323-fcf0d0ab0e60 service nova] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Updated VIF entry in instance network info cache for port 80e67951-2c3d-47b5-8138-95d2a17fa1e2. 
{{(pid=61649) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 620.555881] env[61649]: DEBUG nova.network.neutron [req-5d632ab7-4f5c-4148-9957-48f29c4e2285 req-74b4dc89-18d8-4fd7-a323-fcf0d0ab0e60 service nova] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Updating instance_info_cache with network_info: [{"id": "80e67951-2c3d-47b5-8138-95d2a17fa1e2", "address": "fa:16:3e:69:3f:a6", "network": {"id": "c929dfa8-bd36-48c6-bfd9-71ab510986bb", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-822183821-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f2b0ea45199c4025a6d3ecd769b154ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "101a44fc-ffde-4e3e-ad82-363454ae458b", "external-id": "nsx-vlan-transportzone-723", "segmentation_id": 723, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap80e67951-2c", "ovs_interfaceid": "80e67951-2c3d-47b5-8138-95d2a17fa1e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 620.556023] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-5d632ab7-4f5c-4148-9957-48f29c4e2285 req-74b4dc89-18d8-4fd7-a323-fcf0d0ab0e60 service nova] Expecting reply to msg fdcd1c00dd8f4094b8f2bd0f37d0b7ad in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 620.564546] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fdcd1c00dd8f4094b8f2bd0f37d0b7ad [ 620.565019] env[61649]: DEBUG oslo_concurrency.lockutils [req-5d632ab7-4f5c-4148-9957-48f29c4e2285 req-74b4dc89-18d8-4fd7-a323-fcf0d0ab0e60 service nova] Releasing lock "refresh_cache-7f9f2074-6822-4d9d-9791-4bebc7e55862" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 620.941007] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Acquiring lock "a0db1e96-4ca4-4fed-b86b-d8457f3570a9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 620.941007] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Lock "a0db1e96-4ca4-4fed-b86b-d8457f3570a9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 622.858073] env[61649]: DEBUG oslo_concurrency.lockutils [None req-744afbc0-0f61-4b66-992d-8f196256caea tempest-ServersTestManualDisk-69134108 tempest-ServersTestManualDisk-69134108-project-member] Acquiring lock "9142a98b-6400-4cd2-b21f-29a435f95503" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 622.858418] env[61649]: DEBUG oslo_concurrency.lockutils [None req-744afbc0-0f61-4b66-992d-8f196256caea tempest-ServersTestManualDisk-69134108 tempest-ServersTestManualDisk-69134108-project-member] Lock "9142a98b-6400-4cd2-b21f-29a435f95503" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 624.838626] env[61649]: DEBUG oslo_concurrency.lockutils [None req-0ca551bb-e9bd-40e5-8df6-fd6ff012a59e tempest-VolumesAdminNegativeTest-523549464 tempest-VolumesAdminNegativeTest-523549464-project-member] Acquiring lock "8999b9ee-ae7e-4438-80b7-dffdb3e92630" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 624.838951] env[61649]: DEBUG oslo_concurrency.lockutils [None req-0ca551bb-e9bd-40e5-8df6-fd6ff012a59e tempest-VolumesAdminNegativeTest-523549464 tempest-VolumesAdminNegativeTest-523549464-project-member] Lock "8999b9ee-ae7e-4438-80b7-dffdb3e92630" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 628.177241] env[61649]: DEBUG oslo_concurrency.lockutils [None req-930b1e75-3a85-4d46-94b4-06b7fe92b3a0 tempest-ServersTestJSON-1955261971 tempest-ServersTestJSON-1955261971-project-member] Acquiring lock "018ab9c2-8c6d-4836-9e26-70ffc33b9b30" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 628.177241] env[61649]: DEBUG oslo_concurrency.lockutils [None req-930b1e75-3a85-4d46-94b4-06b7fe92b3a0 tempest-ServersTestJSON-1955261971 tempest-ServersTestJSON-1955261971-project-member] Lock "018ab9c2-8c6d-4836-9e26-70ffc33b9b30" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 629.034342] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5a8faaf4-6b65-42c3-8b17-eed0a9b1450b tempest-ListServersNegativeTestJSON-2013351456 tempest-ListServersNegativeTestJSON-2013351456-project-member] Acquiring lock "93bf61a5-0737-4495-854d-14f1feebab86" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 629.034578] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5a8faaf4-6b65-42c3-8b17-eed0a9b1450b tempest-ListServersNegativeTestJSON-2013351456 tempest-ListServersNegativeTestJSON-2013351456-project-member] Lock "93bf61a5-0737-4495-854d-14f1feebab86" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 629.066372] env[61649]: DEBUG 
oslo_concurrency.lockutils [None req-5a8faaf4-6b65-42c3-8b17-eed0a9b1450b tempest-ListServersNegativeTestJSON-2013351456 tempest-ListServersNegativeTestJSON-2013351456-project-member] Acquiring lock "f814cbd2-8d20-4a26-9bae-000a70a3e082" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 629.066691] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5a8faaf4-6b65-42c3-8b17-eed0a9b1450b tempest-ListServersNegativeTestJSON-2013351456 tempest-ListServersNegativeTestJSON-2013351456-project-member] Lock "f814cbd2-8d20-4a26-9bae-000a70a3e082" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 629.094970] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5a8faaf4-6b65-42c3-8b17-eed0a9b1450b tempest-ListServersNegativeTestJSON-2013351456 tempest-ListServersNegativeTestJSON-2013351456-project-member] Acquiring lock "0fc6dad2-0cde-46db-b840-3bd2737a91af" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 629.095213] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5a8faaf4-6b65-42c3-8b17-eed0a9b1450b tempest-ListServersNegativeTestJSON-2013351456 tempest-ListServersNegativeTestJSON-2013351456-project-member] Lock "0fc6dad2-0cde-46db-b840-3bd2737a91af" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 632.736962] env[61649]: DEBUG oslo_concurrency.lockutils [None req-0c55e1f7-81bf-4c1a-8c95-b69fb287ecb9 tempest-ServersNegativeTestJSON-461174736 tempest-ServersNegativeTestJSON-461174736-project-member] Acquiring lock "f09ee30f-7a9b-4c2e-a0b6-da2a711b9b4e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 632.737695] env[61649]: DEBUG oslo_concurrency.lockutils [None req-0c55e1f7-81bf-4c1a-8c95-b69fb287ecb9 tempest-ServersNegativeTestJSON-461174736 tempest-ServersNegativeTestJSON-461174736-project-member] Lock "f09ee30f-7a9b-4c2e-a0b6-da2a711b9b4e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 636.349811] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5d1bf3f5-69a3-46b1-a25f-1249767c4d6b tempest-FloatingIPsAssociationNegativeTestJSON-57694453 tempest-FloatingIPsAssociationNegativeTestJSON-57694453-project-member] Acquiring lock "28428441-219c-4627-857e-ab8b91390c68" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 636.350214] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5d1bf3f5-69a3-46b1-a25f-1249767c4d6b tempest-FloatingIPsAssociationNegativeTestJSON-57694453 
tempest-FloatingIPsAssociationNegativeTestJSON-57694453-project-member] Lock "28428441-219c-4627-857e-ab8b91390c68" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 642.396259] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 642.396555] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 642.397145] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 520f407dc36c43e7adda720dc932bd83 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 642.415437] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 520f407dc36c43e7adda720dc932bd83 [ 642.419369] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 642.419566] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 642.419722] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 642.803347] env[61649]: DEBUG oslo_concurrency.lockutils [None req-f489945d-6872-4187-a70f-717312fbcd11 tempest-ServerDiagnosticsNegativeTest-777670579 tempest-ServerDiagnosticsNegativeTest-777670579-project-member] Acquiring lock "5e9ab69f-856e-4b8d-808a-0799b87a9cc6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 642.803576] env[61649]: DEBUG oslo_concurrency.lockutils [None req-f489945d-6872-4187-a70f-717312fbcd11 tempest-ServerDiagnosticsNegativeTest-777670579 tempest-ServerDiagnosticsNegativeTest-777670579-project-member] Lock "5e9ab69f-856e-4b8d-808a-0799b87a9cc6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 642.928497] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 642.928688] env[61649]: DEBUG nova.compute.manager [None 
req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Starting heal instance info cache {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 642.928811] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Rebuilding the list of instances to heal {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 642.929475] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 331d65c9825e4667ba19611992ba9bf9 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 642.948070] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 331d65c9825e4667ba19611992ba9bf9 [ 642.950760] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 642.950916] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 29281253-e489-48f5-b219-75ae984adb00] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 642.951049] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 642.951179] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 642.951302] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 642.951447] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 642.951573] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 642.951691] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 00931111-13a1-447d-a401-943221badd59] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 642.951809] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Skipping network cache update for instance because it is Building. 
{{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 642.951928] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 642.952059] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Didn't find any instances for network info cache update. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 642.952544] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 642.952687] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61649) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 642.952842] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 642.953163] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 4f2ee50a12604b82b26fb1a69a65bc78 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 642.962086] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4f2ee50a12604b82b26fb1a69a65bc78 [ 642.962939] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 642.963138] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 642.963294] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 642.963438] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61649) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 642.964541] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8185d74-e997-4a18-afdf-0ee6739c59a5 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.976607] env[61649]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1710086a-6ada-4521-a296-fbd7a05a262d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.989752] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e7e8868-c4f1-4574-8cc9-01f347d94ac6 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.997364] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8ab7206-f8bd-48c4-b394-521a8b5180d5 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.029177] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181790MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61649) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 643.029334] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 643.029860] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 643.030815] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 05141bc19d834d0ca882849d9edd6e3b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 643.077731] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 05141bc19d834d0ca882849d9edd6e3b [ 643.082254] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg bf7940e32aa54322824d30ba546fa8cd in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 643.091243] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bf7940e32aa54322824d30ba546fa8cd [ 643.116291] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance b6e68fe1-4ec8-4f0f-bc6b-168038b1998e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 643.116462] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 29281253-e489-48f5-b219-75ae984adb00 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 643.116591] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 2c9b8ba9-193e-468f-bc4e-006ab413b374 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 643.116715] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance aacbebf5-bd31-465b-b574-6c4a98b27f30 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 643.116837] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 7d93caea-4740-4bcd-9f06-9397d37a07b2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 643.116961] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance eb0c04e3-1234-445c-bfa6-e031dd0b89d3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 643.117074] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance fd0ac9db-adc2-46f2-93ff-0b7e299534a7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 643.117191] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 00931111-13a1-447d-a401-943221badd59 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 643.117369] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 99f9912a-edf0-40f5-a7ce-55767081705b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 643.117486] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 7f9f2074-6822-4d9d-9791-4bebc7e55862 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 643.118047] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 879b26a737b94ea9bd9b4a3c8e8c08f1 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 643.141085] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 879b26a737b94ea9bd9b4a3c8e8c08f1 [ 643.141892] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance bf8c692f-6510-4548-aedd-0e1792512e20 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 643.142396] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg b16ad177c19a49f897231529467bfe85 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 643.166264] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b16ad177c19a49f897231529467bfe85 [ 643.167144] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance e5fe92cf-e150-419f-a164-a98a9d24dd8c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 643.167649] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg b319d74eeb064f63bd303635e2ad5950 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 643.180233] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b319d74eeb064f63bd303635e2ad5950 [ 643.180934] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 95426048-d403-4dad-9ad7-b76de655a319 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 643.181417] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 5c5c25fcf0374a59b945ea312c0dc0ae in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 643.194091] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5c5c25fcf0374a59b945ea312c0dc0ae [ 643.194744] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance adc73db6-8bff-4007-ae74-528a37840d96 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 643.195328] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 3d7ce03cd64d453e8f6a9dd9f5c5bc93 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 643.205242] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3d7ce03cd64d453e8f6a9dd9f5c5bc93 [ 643.206021] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 788fc9bb-2f88-4f82-88cf-9c7a002edb47 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 643.206508] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg c13d55c2dbd54248a30e6f85d4076716 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 643.217217] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c13d55c2dbd54248a30e6f85d4076716 [ 643.218228] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 62d0a02d-88af-48f2-a14a-c9f2e899babe has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 643.218228] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 6aace62698914d8488db61e98caab4f2 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 643.236222] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6aace62698914d8488db61e98caab4f2 [ 643.236904] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance c7fac9e7-0802-4f2e-a577-4ee50efa835a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 643.237445] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg bb15342dc8f84515a19821468594d192 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 643.248635] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bb15342dc8f84515a19821468594d192 [ 643.249568] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance b9315087-b61c-488c-aaa9-5f4b4e2f12b4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 643.250113] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 74b027740c3240e3b6db2f25ed57a5d2 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 643.260223] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 74b027740c3240e3b6db2f25ed57a5d2 [ 643.260960] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 3889663a-53e8-4d3e-bed6-5e86519522ef has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 643.261666] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg a3700485efbb4785ae0668601cdaf4cd in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 643.277669] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a3700485efbb4785ae0668601cdaf4cd [ 643.278606] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 833dbc2a-a434-4ca1-aa33-b48a910c0e91 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 643.279472] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg bf5cafefb69a440d8cc91834fa442147 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 643.291441] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bf5cafefb69a440d8cc91834fa442147 [ 643.292120] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 8819ddfb-4286-455b-8216-05e89424183e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 643.292590] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 6715e98e49bc469fb63ee3b77f96a3c4 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 643.324115] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6715e98e49bc469fb63ee3b77f96a3c4 [ 643.326234] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance fb407af6-66cb-4b3d-b630-d2b5a4b2c8d8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 643.326833] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 57a92f490d3e4ad5a1a5bc5d45dd631f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 643.345038] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 57a92f490d3e4ad5a1a5bc5d45dd631f [ 643.345038] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance fc905ad2-7f1d-4356-a8f7-1eda98cdd01d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 643.345038] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 7c768507a45649049af9baba529a629d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 643.356072] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7c768507a45649049af9baba529a629d [ 643.356791] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 4868f1fe-04d3-4055-bf61-8a46723cf573 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 643.357279] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 4f4a947209a74f5a8e2ec0b1fdfb962e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 643.367650] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4f4a947209a74f5a8e2ec0b1fdfb962e [ 643.368380] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance fffdaf35-4e17-40ba-95a7-cf34fa04737e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 643.368857] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg f8ab6ac56aa6494c94d56ab3eb9f0271 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 643.379233] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f8ab6ac56aa6494c94d56ab3eb9f0271 [ 643.380096] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance bf8a0b66-22ef-4f1e-99a3-9727d4a61c02 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 643.380600] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg e40403a0b87d49a59fb409cfdc605b60 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 643.390048] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e40403a0b87d49a59fb409cfdc605b60 [ 643.390725] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 6126223b-c712-4260-a49b-7a56c4035e75 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 643.391249] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg ea042137eec646a5bef60d5ab3fbbdb1 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 643.400741] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ea042137eec646a5bef60d5ab3fbbdb1 [ 643.401458] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance a0db1e96-4ca4-4fed-b86b-d8457f3570a9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 643.401935] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 009624bed7f74c12ac43d2c2524c5e58 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 643.417722] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 009624bed7f74c12ac43d2c2524c5e58 [ 643.418536] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 9142a98b-6400-4cd2-b21f-29a435f95503 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 643.419023] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 502575127c3c440d97dad2f00eca724f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 643.428779] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 502575127c3c440d97dad2f00eca724f [ 643.429480] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 8999b9ee-ae7e-4438-80b7-dffdb3e92630 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 643.429954] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg b9f44942d5884d479ea553254d1c4c18 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 643.439354] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b9f44942d5884d479ea553254d1c4c18 [ 643.440085] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 018ab9c2-8c6d-4836-9e26-70ffc33b9b30 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 643.440559] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 5abcca9b532449d8af6d4f6d2db56918 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 643.449967] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5abcca9b532449d8af6d4f6d2db56918 [ 643.450101] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 93bf61a5-0737-4495-854d-14f1feebab86 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 643.450567] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg d529ed09d92b4673949d6af09873ead5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 643.459139] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d529ed09d92b4673949d6af09873ead5 [ 643.459767] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance f814cbd2-8d20-4a26-9bae-000a70a3e082 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 643.460267] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 11dfb2a715d448b4b34916c2f7ac9b57 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 643.470062] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 11dfb2a715d448b4b34916c2f7ac9b57 [ 643.470704] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 0fc6dad2-0cde-46db-b840-3bd2737a91af has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 643.471834] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 0ae2c59c8a2c452585ef6ce399ee160a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 643.480480] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0ae2c59c8a2c452585ef6ce399ee160a [ 643.481106] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance f09ee30f-7a9b-4c2e-a0b6-da2a711b9b4e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 643.482231] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 051549e5f3324554b2975d4756b1841f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 643.490523] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 051549e5f3324554b2975d4756b1841f [ 643.491139] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 28428441-219c-4627-857e-ab8b91390c68 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 643.491574] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 5426e171889840fcba150886986b410f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 643.501455] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5426e171889840fcba150886986b410f [ 643.502090] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 5e9ab69f-856e-4b8d-808a-0799b87a9cc6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 643.502332] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 643.502476] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 643.981458] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98436794-3b49-4653-bc43-f5f5d0ddc793 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.988996] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c19d0ed5-2f1f-47bb-be42-68dc24dee1f2 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.019338] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7545d371-397d-4616-8633-55d7ab3dd1f2 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.026998] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f90b2e76-0820-4811-b671-2f71309b5889 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.043200] env[61649]: DEBUG nova.compute.provider_tree [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 644.043200] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 569021711480410cb56da69b35ad98ce in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 644.050205] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 569021711480410cb56da69b35ad98ce [ 644.051095] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 644.053364] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 9dcee411564845ccb54a9273392844dc in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 644.078046] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9dcee411564845ccb54a9273392844dc [ 
644.078780] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61649) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 644.078953] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.049s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 644.485309] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9b1ac097-bfad-47de-bf7d-b61097337ef8 tempest-ServerShowV254Test-1377112975 tempest-ServerShowV254Test-1377112975-project-member] Acquiring lock "b176b001-3c32-439a-b6cd-9b608a0ac623" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 644.485734] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9b1ac097-bfad-47de-bf7d-b61097337ef8 tempest-ServerShowV254Test-1377112975 tempest-ServerShowV254Test-1377112975-project-member] Lock "b176b001-3c32-439a-b6cd-9b608a0ac623" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 645.055334] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 645.055568] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 650.979843] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 1d0667832eb2435f83bca08dbfeed1f1 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 650.992852] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1d0667832eb2435f83bca08dbfeed1f1 [ 660.500408] env[61649]: WARNING oslo_vmware.rw_handles [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 660.500408] env[61649]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 660.500408] env[61649]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 660.500408] env[61649]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 660.500408] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 660.500408] env[61649]: ERROR oslo_vmware.rw_handles response.begin() [ 660.500408] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 660.500408] env[61649]: ERROR oslo_vmware.rw_handles version, status, reason = 
self._read_status() [ 660.500408] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 660.500408] env[61649]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 660.500408] env[61649]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 660.500408] env[61649]: ERROR oslo_vmware.rw_handles [ 660.500959] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] Downloaded image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to vmware_temp/db963ba9-f4bd-447c-96ab-15902146dbc1/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 660.502599] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] Caching image {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 660.502869] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Copying Virtual Disk [datastore1] vmware_temp/db963ba9-f4bd-447c-96ab-15902146dbc1/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk to [datastore1] vmware_temp/db963ba9-f4bd-447c-96ab-15902146dbc1/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk {{(pid=61649) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 660.503166] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3238b009-504d-44ba-925d-cf3eb6cfdcb8 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.510806] env[61649]: DEBUG oslo_vmware.api [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Waiting for the task: (returnval){ [ 660.510806] env[61649]: value = "task-158119" [ 660.510806] env[61649]: _type = "Task" [ 660.510806] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.518804] env[61649]: DEBUG oslo_vmware.api [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Task: {'id': task-158119, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.021869] env[61649]: DEBUG oslo_vmware.exceptions [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Fault InvalidArgument not matched. 
{{(pid=61649) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 661.022199] env[61649]: DEBUG oslo_concurrency.lockutils [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 661.022944] env[61649]: ERROR nova.compute.manager [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 661.022944] env[61649]: Faults: ['InvalidArgument'] [ 661.022944] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] Traceback (most recent call last): [ 661.022944] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 661.022944] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] yield resources [ 661.022944] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 661.022944] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] self.driver.spawn(context, instance, image_meta, [ 661.022944] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 661.022944] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 661.022944] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 661.022944] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] self._fetch_image_if_missing(context, vi) [ 661.022944] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 661.023265] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] image_cache(vi, tmp_image_ds_loc) [ 661.023265] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 661.023265] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] vm_util.copy_virtual_disk( [ 661.023265] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 661.023265] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] session._wait_for_task(vmdk_copy_task) [ 661.023265] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 661.023265] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] return self.wait_for_task(task_ref) [ 661.023265] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 661.023265] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] return evt.wait() [ 661.023265] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 661.023265] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] result = hub.switch() [ 661.023265] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 661.023265] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] return self.greenlet.switch() [ 661.023650] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 661.023650] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] self.f(*self.args, **self.kw) [ 661.023650] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 661.023650] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] raise exceptions.translate_fault(task_info.error) [ 661.023650] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 661.023650] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] Faults: ['InvalidArgument'] [ 661.023650] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] [ 661.023650] env[61649]: INFO nova.compute.manager [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] Terminating instance [ 661.025205] env[61649]: DEBUG oslo_concurrency.lockutils [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 661.025205] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 661.025322] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-47288e71-d270-412d-9fc6-3a2ef598d1d6 
{{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.029796] env[61649]: DEBUG nova.compute.manager [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 661.029985] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 661.030755] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8a48c6d-3294-4a3d-b26e-46a91e1e0efd {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.034808] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 661.036024] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61649) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 661.036024] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7af3057e-f594-4f02-9060-9465cc1a0928 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.039976] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] Unregistering the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 661.040490] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-073639f4-f1f5-439b-80fa-e6b3c275b46d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.042745] env[61649]: DEBUG oslo_vmware.api [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Waiting for the task: (returnval){ [ 661.042745] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52c1b013-9093-280b-99fa-a7e07bff908d" [ 661.042745] env[61649]: _type = "Task" [ 661.042745] env[61649]: } to complete. 
{{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.050227] env[61649]: DEBUG oslo_vmware.api [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52c1b013-9093-280b-99fa-a7e07bff908d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.114872] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] Unregistered the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 661.115298] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] Deleting contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 661.115649] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Deleting the datastore file [datastore1] b6e68fe1-4ec8-4f0f-bc6b-168038b1998e {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 661.116082] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a9d21734-2f95-426e-afa5-5041233ae79a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.124299] env[61649]: DEBUG oslo_vmware.api [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Waiting for the task: (returnval){ [ 661.124299] env[61649]: value = "task-158121" [ 661.124299] env[61649]: _type = "Task" [ 661.124299] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.134224] env[61649]: DEBUG oslo_vmware.api [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Task: {'id': task-158121, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.552771] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] Preparing fetch location {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 661.553098] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Creating directory with path [datastore1] vmware_temp/7f80dc92-07ec-4811-8da4-1f1bd5d2382d/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 661.553486] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1bbe5ce4-1b6a-4dd4-a257-f3cae5e4b2f8 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.565334] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Created directory with path [datastore1] vmware_temp/7f80dc92-07ec-4811-8da4-1f1bd5d2382d/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 661.565600] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] Fetch image to [datastore1] vmware_temp/7f80dc92-07ec-4811-8da4-1f1bd5d2382d/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 661.565834] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to [datastore1] vmware_temp/7f80dc92-07ec-4811-8da4-1f1bd5d2382d/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 661.566604] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b877118c-48dc-40d1-bfc8-a9d97fd13795 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.573282] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd5be10f-46be-42ec-aafd-907d055de699 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.582866] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f25837e8-8e9d-487b-a76d-61bd9fb54f62 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.614856] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-b53eb74a-7226-4599-b007-7ff3c3483e89 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.620594] env[61649]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-db4cf486-7103-4a17-99f1-2e84f40ecaaf {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.632915] env[61649]: DEBUG oslo_vmware.api [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Task: {'id': task-158121, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.071791} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.633229] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Deleted the datastore file {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 661.633469] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] Deleted contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 661.633709] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 661.633945] env[61649]: INFO nova.compute.manager [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] Took 0.60 seconds to destroy the instance on the hypervisor. 
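The sequence above is the oslo.vmware task pattern end to end: CopyVirtualDisk_Task is invoked, polled at "progress is 0%", and once vCenter reports the task in an error state the fault is translated into a VimFaultException ("A specified parameter was not correct: fileType", Faults: ['InvalidArgument']), which unwinds the spawn and triggers the UnregisterVM and DeleteDatastoreFile_Task cleanup. Below is a minimal, self-contained sketch of that poll-and-translate loop as it appears in the log; FakeTask and the exception class are illustrative stand-ins, not the real oslo_vmware.api classes.

# Simplified sketch of the poll-and-translate loop visible in the log.
# FakeTask and VimFaultException are stand-ins, not oslo.vmware's classes.
import time


class VimFaultException(Exception):
    def __init__(self, fault_list, msg):
        super().__init__(msg)
        self.fault_list = fault_list


class FakeTask:
    """Stands in for a vCenter task reference; reports an error after two polls."""

    def __init__(self):
        self._polls = 0

    def poll(self):
        self._polls += 1
        if self._polls < 3:
            return {"state": "running", "progress": 0}
        return {"state": "error",
                "fault": ("InvalidArgument",
                          "A specified parameter was not correct: fileType")}


def wait_for_task(task, interval=0.1):
    """Poll until the task succeeds, or raise a translated fault as in the log."""
    while True:
        info = task.poll()
        if info["state"] == "running":
            # Mirrors the repeated "progress is 0%" DEBUG lines above.
            print(f"Task progress is {info['progress']}%.")
            time.sleep(interval)
        elif info["state"] == "success":
            return info
        else:
            # Translate the vCenter fault into an exception for the caller,
            # the step the log shows as "Fault InvalidArgument not matched."
            name, msg = info["fault"]
            raise VimFaultException([name], msg)


if __name__ == "__main__":
    try:
        wait_for_task(FakeTask())
    except VimFaultException as exc:
        print(f"Faults: {exc.fault_list}")  # -> Faults: ['InvalidArgument']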
[ 661.636421] env[61649]: DEBUG nova.compute.claims [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] Aborting claim: {{(pid=61649) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 661.636601] env[61649]: DEBUG oslo_concurrency.lockutils [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 661.636811] env[61649]: DEBUG oslo_concurrency.lockutils [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 661.638628] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Expecting reply to msg 28fc47339cd4421db0025ebf17f5bbad in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 661.652801] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 661.670958] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 28fc47339cd4421db0025ebf17f5bbad [ 661.710997] env[61649]: DEBUG oslo_vmware.rw_handles [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7f80dc92-07ec-4811-8da4-1f1bd5d2382d/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 661.774380] env[61649]: DEBUG oslo_vmware.rw_handles [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Completed reading data from the image iterator. {{(pid=61649) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 661.774580] env[61649]: DEBUG oslo_vmware.rw_handles [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7f80dc92-07ec-4811-8da4-1f1bd5d2382d/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61649) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 662.160348] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48b4f952-e78f-4b76-b3aa-64dceb6aea50 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.168117] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44ee68f3-ba6d-4c8c-8305-c257d6ee0ab9 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.197888] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaef9429-106f-4efe-98bb-86b7b6ca112a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.205064] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04f37fdb-4373-4a69-b34e-dca099351e77 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.218715] env[61649]: DEBUG nova.compute.provider_tree [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 662.218715] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Expecting reply to msg 94e85599dd7a47b38d684653145c58cb in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 662.229050] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 94e85599dd7a47b38d684653145c58cb [ 662.229938] env[61649]: DEBUG nova.scheduler.client.report [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 662.232333] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Expecting reply to msg edf8620f85e844969a6108ab7f944fe1 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 662.243198] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg edf8620f85e844969a6108ab7f944fe1 [ 662.243911] env[61649]: DEBUG oslo_concurrency.lockutils [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.607s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 662.244490] env[61649]: ERROR nova.compute.manager [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 662.244490] env[61649]: Faults: ['InvalidArgument'] [ 662.244490] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] Traceback (most recent call last): [ 662.244490] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 662.244490] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] self.driver.spawn(context, instance, image_meta, [ 662.244490] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 662.244490] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 662.244490] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 662.244490] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] self._fetch_image_if_missing(context, vi) [ 662.244490] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 662.244490] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] image_cache(vi, tmp_image_ds_loc) [ 662.244490] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 662.244780] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] vm_util.copy_virtual_disk( [ 662.244780] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 662.244780] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] session._wait_for_task(vmdk_copy_task) [ 662.244780] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 662.244780] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] return self.wait_for_task(task_ref) [ 662.244780] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 662.244780] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] return evt.wait() [ 662.244780] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 662.244780] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] result = hub.switch() [ 662.244780] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 662.244780] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] return self.greenlet.switch() [ 662.244780] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 662.244780] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] self.f(*self.args, **self.kw) [ 662.245069] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 662.245069] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] raise exceptions.translate_fault(task_info.error) [ 662.245069] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 662.245069] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] Faults: ['InvalidArgument'] [ 662.245069] env[61649]: ERROR nova.compute.manager [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] [ 662.245488] env[61649]: DEBUG nova.compute.utils [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] VimFaultException {{(pid=61649) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 662.246970] env[61649]: DEBUG nova.compute.manager [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] Build of instance b6e68fe1-4ec8-4f0f-bc6b-168038b1998e was re-scheduled: A specified parameter was not correct: fileType [ 662.246970] env[61649]: Faults: ['InvalidArgument'] {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 662.247417] env[61649]: DEBUG nova.compute.manager [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] Unplugging VIFs for instance {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 662.247647] env[61649]: DEBUG nova.compute.manager [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 662.247853] env[61649]: DEBUG nova.compute.manager [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 662.248068] env[61649]: DEBUG nova.network.neutron [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 662.625014] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Expecting reply to msg 8949b68351f94c6a83ef81b80eb5bb3a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 662.636774] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8949b68351f94c6a83ef81b80eb5bb3a [ 662.637378] env[61649]: DEBUG nova.network.neutron [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 662.637850] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Expecting reply to msg 1ca6c9451c7f4f70b48679bcb8fdc6bb in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 662.648109] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1ca6c9451c7f4f70b48679bcb8fdc6bb [ 662.648109] env[61649]: INFO nova.compute.manager [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] [instance: b6e68fe1-4ec8-4f0f-bc6b-168038b1998e] Took 0.40 seconds to deallocate network for instance. 
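What follows the fault is the standard reschedule path: the compute_resources claim is aborted, the build is logged as re-scheduled, VIF unplugging is skipped because the VMware driver provides no unplug_vifs method, the instance_info_cache is written back empty, and the network is deallocated so the instance can be handed back to the scheduler. The sketch below compresses that control flow; the helper functions are hypothetical stand-ins for the nova.compute.manager internals named in the log, not the actual implementation.

# Compressed sketch of the reschedule path seen above; every helper here is
# a hypothetical stand-in for the Nova internals named in the log entries.
class BuildFailed(Exception):
    """Stands in for the driver fault that aborts the spawn."""


def spawn(instance):
    # In the log this is the VMware spawn failing on CopyVirtualDisk.
    raise BuildFailed("A specified parameter was not correct: fileType")


def abort_claim(instance):
    # The resource tracker releases the "compute_resources" claim.
    print(f"Aborting claim for {instance}")


def deallocate_network(instance):
    print(f"Deallocating network for instance {instance}")


def delete_allocations(instance):
    print(f"Deleted allocations for instance {instance}")


def build_and_run(instance, driver_has_unplug_vifs=False):
    try:
        spawn(instance)
    except BuildFailed as exc:
        abort_claim(instance)
        print(f"Build of instance {instance} was re-scheduled: {exc}")
        if not driver_has_unplug_vifs:
            # Matches "Virt driver does not provide unplug_vifs method..."
            print("Skipping VIF unplug; driver does not provide unplug_vifs.")
        deallocate_network(instance)
        delete_allocations(instance)
        return "RESCHEDULED"
    return "ACTIVE"


if __name__ == "__main__":
    assert build_and_run("b6e68fe1-4ec8-4f0f-bc6b-168038b1998e") == "RESCHEDULED"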
[ 662.650675] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Expecting reply to msg b9b3b4259dc24706984cb125075ff496 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 662.686909] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b9b3b4259dc24706984cb125075ff496 [ 662.689561] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Expecting reply to msg 06852dbe912c472588c35b61b1b67302 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 662.724822] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 06852dbe912c472588c35b61b1b67302 [ 662.748474] env[61649]: INFO nova.scheduler.client.report [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Deleted allocations for instance b6e68fe1-4ec8-4f0f-bc6b-168038b1998e [ 662.754218] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Expecting reply to msg 5e7c051d34aa4637942c2fbf3b288956 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 662.764501] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5e7c051d34aa4637942c2fbf3b288956 [ 662.765087] env[61649]: DEBUG oslo_concurrency.lockutils [None req-1b30457b-66f5-4364-af84-f4158359850e tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Lock "b6e68fe1-4ec8-4f0f-bc6b-168038b1998e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 100.621s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 662.765680] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Expecting reply to msg a6976a35110d4985ad36c03d7ba8c3bb in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 662.780908] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a6976a35110d4985ad36c03d7ba8c3bb [ 662.781504] env[61649]: DEBUG nova.compute.manager [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Starting instance... 
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 662.783272] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Expecting reply to msg 7912f9010cd04673a309dd48fcff5b08 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 662.820039] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7912f9010cd04673a309dd48fcff5b08 [ 662.838301] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 662.838643] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 662.840270] env[61649]: INFO nova.compute.claims [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 662.842468] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Expecting reply to msg dbd95d044a1a4cde8263221c2987b27a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 662.876218] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dbd95d044a1a4cde8263221c2987b27a [ 662.878003] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Expecting reply to msg d755f09f0a9f4edfa74edcad2bc52f26 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 662.885544] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d755f09f0a9f4edfa74edcad2bc52f26 [ 663.331869] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7123c27a-a71a-4419-a1e7-4b1cb36096a8 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.339294] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bd2a2d1-5a05-410c-9309-8e61def3ebfc {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.370125] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be82eadb-7e5a-4815-a0ae-815b140173b0 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.379384] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50885a4f-e35b-48e7-b300-d5829104fff4 
{{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.392924] env[61649]: DEBUG nova.compute.provider_tree [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 663.393430] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Expecting reply to msg cf9f4a013d3c4d56b82ad75ec3233092 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 663.400453] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cf9f4a013d3c4d56b82ad75ec3233092 [ 663.401393] env[61649]: DEBUG nova.scheduler.client.report [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 663.403640] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Expecting reply to msg ad6658f6fd4448e58b04323032891dfa in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 663.415434] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ad6658f6fd4448e58b04323032891dfa [ 663.416179] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.578s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 663.416640] env[61649]: DEBUG nova.compute.manager [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Start building networks asynchronously for instance. 
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 663.418438] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Expecting reply to msg 7c864c8445ec4d43b749d31ddc9bedbc in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 663.449210] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7c864c8445ec4d43b749d31ddc9bedbc [ 663.450786] env[61649]: DEBUG nova.compute.utils [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Using /dev/sd instead of None {{(pid=61649) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 663.451377] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Expecting reply to msg 6657a8e4ffae4ebf93b748c9109ddfb5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 663.452397] env[61649]: DEBUG nova.compute.manager [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Allocating IP information in the background. {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 663.452563] env[61649]: DEBUG nova.network.neutron [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] allocate_for_instance() {{(pid=61649) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 663.464374] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6657a8e4ffae4ebf93b748c9109ddfb5 [ 663.464917] env[61649]: DEBUG nova.compute.manager [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Start building block device mappings for instance. 
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 663.466517] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Expecting reply to msg 2bfbe6834ba441e19d4963919b6c9bb4 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 663.495778] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2bfbe6834ba441e19d4963919b6c9bb4 [ 663.498434] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Expecting reply to msg 25e033bd325540eb82334d197c672726 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 663.500909] env[61649]: DEBUG nova.policy [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6f9c51cfef1646d4986c4cea34966be4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5a70e175710d40f1b889d65c5eaca043', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61649) authorize /opt/stack/nova/nova/policy.py:203}} [ 663.539880] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 25e033bd325540eb82334d197c672726 [ 663.540999] env[61649]: DEBUG nova.compute.manager [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Start spawning the instance on the hypervisor. 
{{(pid=61649) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 663.561867] env[61649]: DEBUG nova.virt.hardware [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-04-02T10:03:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-04-02T10:03:42Z,direct_url=,disk_format='vmdk',id=d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d9c1bd4c77004c3cb8e42232cad1896c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-04-02T10:03:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 663.562105] env[61649]: DEBUG nova.virt.hardware [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Flavor limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 663.562255] env[61649]: DEBUG nova.virt.hardware [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Image limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 663.562430] env[61649]: DEBUG nova.virt.hardware [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Flavor pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 663.562571] env[61649]: DEBUG nova.virt.hardware [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Image pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 663.562710] env[61649]: DEBUG nova.virt.hardware [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 663.562907] env[61649]: DEBUG nova.virt.hardware [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 663.563069] env[61649]: DEBUG nova.virt.hardware [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 663.563231] env[61649]: DEBUG nova.virt.hardware [None 
req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Got 1 possible topologies {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 663.563389] env[61649]: DEBUG nova.virt.hardware [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 663.563555] env[61649]: DEBUG nova.virt.hardware [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 663.564398] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26e2b5fb-6208-4d7b-bdfd-8589fb8b8b2c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.573403] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b67bdaf8-f5db-4062-b80c-07b32b474853 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.818687] env[61649]: DEBUG nova.network.neutron [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Successfully created port: 1649a2d0-32f2-4727-ac50-4c34c9435fea {{(pid=61649) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 664.340706] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4b0cf67f-82f7-4586-bdc6-38b2400e51c7 tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Acquiring lock "95dad1e2-74d2-478f-8095-23a26770e27f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 664.340706] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4b0cf67f-82f7-4586-bdc6-38b2400e51c7 tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Lock "95dad1e2-74d2-478f-8095-23a26770e27f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 664.558632] env[61649]: DEBUG nova.network.neutron [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Successfully updated port: 1649a2d0-32f2-4727-ac50-4c34c9435fea {{(pid=61649) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 664.558632] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Expecting reply to msg 377c76c70ede46d89c8f1bbcb4bd9b9a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 664.565937] 
env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 377c76c70ede46d89c8f1bbcb4bd9b9a [ 664.566589] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Acquiring lock "refresh_cache-bf8c692f-6510-4548-aedd-0e1792512e20" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 664.566719] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Acquired lock "refresh_cache-bf8c692f-6510-4548-aedd-0e1792512e20" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 664.566865] env[61649]: DEBUG nova.network.neutron [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Building network info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 664.567301] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Expecting reply to msg c0b2e53c1eba4519830d81090ee55907 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 664.580781] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c0b2e53c1eba4519830d81090ee55907 [ 664.620426] env[61649]: DEBUG nova.network.neutron [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Instance cache missing network info. 
{{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 664.815769] env[61649]: DEBUG nova.network.neutron [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Updating instance_info_cache with network_info: [{"id": "1649a2d0-32f2-4727-ac50-4c34c9435fea", "address": "fa:16:3e:fa:f2:f0", "network": {"id": "a3b98455-bec9-49f9-b508-a0338e3e56a1", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1274795432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a70e175710d40f1b889d65c5eaca043", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d88bb07-f93c-45ca-bce7-230cb1f33833", "external-id": "nsx-vlan-transportzone-387", "segmentation_id": 387, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1649a2d0-32", "ovs_interfaceid": "1649a2d0-32f2-4727-ac50-4c34c9435fea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 664.816278] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Expecting reply to msg 07da9c842fb04049af3fccc00d13411e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 664.826322] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 07da9c842fb04049af3fccc00d13411e [ 664.826959] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Releasing lock "refresh_cache-bf8c692f-6510-4548-aedd-0e1792512e20" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 664.827257] env[61649]: DEBUG nova.compute.manager [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Instance network_info: |[{"id": "1649a2d0-32f2-4727-ac50-4c34c9435fea", "address": "fa:16:3e:fa:f2:f0", "network": {"id": "a3b98455-bec9-49f9-b508-a0338e3e56a1", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1274795432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a70e175710d40f1b889d65c5eaca043", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"2d88bb07-f93c-45ca-bce7-230cb1f33833", "external-id": "nsx-vlan-transportzone-387", "segmentation_id": 387, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1649a2d0-32", "ovs_interfaceid": "1649a2d0-32f2-4727-ac50-4c34c9435fea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 664.827596] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fa:f2:f0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2d88bb07-f93c-45ca-bce7-230cb1f33833', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1649a2d0-32f2-4727-ac50-4c34c9435fea', 'vif_model': 'vmxnet3'}] {{(pid=61649) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 664.834921] env[61649]: DEBUG oslo.service.loopingcall [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 664.835411] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Creating VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 664.835982] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bd430cc5-e338-4d98-acbc-9afb5c298c83 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.857156] env[61649]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 664.857156] env[61649]: value = "task-158122" [ 664.857156] env[61649]: _type = "Task" [ 664.857156] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.864688] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158122, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.033503] env[61649]: DEBUG nova.compute.manager [req-ab2262d4-6f29-4cb0-8373-9f62a78d2f94 req-bbd7b640-af96-4fc6-a2ab-6b99dabf4cd6 service nova] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Received event network-vif-plugged-1649a2d0-32f2-4727-ac50-4c34c9435fea {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 665.033728] env[61649]: DEBUG oslo_concurrency.lockutils [req-ab2262d4-6f29-4cb0-8373-9f62a78d2f94 req-bbd7b640-af96-4fc6-a2ab-6b99dabf4cd6 service nova] Acquiring lock "bf8c692f-6510-4548-aedd-0e1792512e20-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 665.033936] env[61649]: DEBUG oslo_concurrency.lockutils [req-ab2262d4-6f29-4cb0-8373-9f62a78d2f94 req-bbd7b640-af96-4fc6-a2ab-6b99dabf4cd6 service nova] Lock "bf8c692f-6510-4548-aedd-0e1792512e20-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 665.034112] env[61649]: DEBUG oslo_concurrency.lockutils [req-ab2262d4-6f29-4cb0-8373-9f62a78d2f94 req-bbd7b640-af96-4fc6-a2ab-6b99dabf4cd6 service nova] Lock "bf8c692f-6510-4548-aedd-0e1792512e20-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 665.034504] env[61649]: DEBUG nova.compute.manager [req-ab2262d4-6f29-4cb0-8373-9f62a78d2f94 req-bbd7b640-af96-4fc6-a2ab-6b99dabf4cd6 service nova] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] No waiting events found dispatching network-vif-plugged-1649a2d0-32f2-4727-ac50-4c34c9435fea {{(pid=61649) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 665.034504] env[61649]: WARNING nova.compute.manager [req-ab2262d4-6f29-4cb0-8373-9f62a78d2f94 req-bbd7b640-af96-4fc6-a2ab-6b99dabf4cd6 service nova] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Received unexpected event network-vif-plugged-1649a2d0-32f2-4727-ac50-4c34c9435fea for instance with vm_state building and task_state spawning. [ 665.034663] env[61649]: DEBUG nova.compute.manager [req-ab2262d4-6f29-4cb0-8373-9f62a78d2f94 req-bbd7b640-af96-4fc6-a2ab-6b99dabf4cd6 service nova] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Received event network-changed-1649a2d0-32f2-4727-ac50-4c34c9435fea {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 665.034744] env[61649]: DEBUG nova.compute.manager [req-ab2262d4-6f29-4cb0-8373-9f62a78d2f94 req-bbd7b640-af96-4fc6-a2ab-6b99dabf4cd6 service nova] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Refreshing instance network info cache due to event network-changed-1649a2d0-32f2-4727-ac50-4c34c9435fea. 
{{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 665.035002] env[61649]: DEBUG oslo_concurrency.lockutils [req-ab2262d4-6f29-4cb0-8373-9f62a78d2f94 req-bbd7b640-af96-4fc6-a2ab-6b99dabf4cd6 service nova] Acquiring lock "refresh_cache-bf8c692f-6510-4548-aedd-0e1792512e20" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 665.035050] env[61649]: DEBUG oslo_concurrency.lockutils [req-ab2262d4-6f29-4cb0-8373-9f62a78d2f94 req-bbd7b640-af96-4fc6-a2ab-6b99dabf4cd6 service nova] Acquired lock "refresh_cache-bf8c692f-6510-4548-aedd-0e1792512e20" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 665.035177] env[61649]: DEBUG nova.network.neutron [req-ab2262d4-6f29-4cb0-8373-9f62a78d2f94 req-bbd7b640-af96-4fc6-a2ab-6b99dabf4cd6 service nova] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Refreshing network info cache for port 1649a2d0-32f2-4727-ac50-4c34c9435fea {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 665.035661] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-ab2262d4-6f29-4cb0-8373-9f62a78d2f94 req-bbd7b640-af96-4fc6-a2ab-6b99dabf4cd6 service nova] Expecting reply to msg 3ef380fbdd5147e0a24ea37d91d69183 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 665.043389] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3ef380fbdd5147e0a24ea37d91d69183 [ 665.296383] env[61649]: DEBUG nova.network.neutron [req-ab2262d4-6f29-4cb0-8373-9f62a78d2f94 req-bbd7b640-af96-4fc6-a2ab-6b99dabf4cd6 service nova] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Updated VIF entry in instance network info cache for port 1649a2d0-32f2-4727-ac50-4c34c9435fea. 
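
The paired Acquiring/Acquired/Releasing records for lock "refresh_cache-<uuid>" above come from oslo.concurrency's named locks, which serialize concurrent rebuilds of one instance's network info cache. A stdlib-only sketch of the same serialize-by-name pattern (the real code uses oslo_concurrency.lockutils as a context manager):

import threading
from collections import defaultdict

_named_locks = defaultdict(threading.Lock)  # one lock per name, on demand

def refresh_instance_cache(instance_uuid, rebuild):
    # Exactly one refresh of a given instance's cache runs at a time.
    with _named_locks[f"refresh_cache-{instance_uuid}"]:
        return rebuild(instance_uuid)

print(refresh_instance_cache("bf8c692f-6510-4548-aedd-0e1792512e20",
                             lambda uuid: f"rebuilt cache for {uuid}"))
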
{{(pid=61649) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 665.296738] env[61649]: DEBUG nova.network.neutron [req-ab2262d4-6f29-4cb0-8373-9f62a78d2f94 req-bbd7b640-af96-4fc6-a2ab-6b99dabf4cd6 service nova] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Updating instance_info_cache with network_info: [{"id": "1649a2d0-32f2-4727-ac50-4c34c9435fea", "address": "fa:16:3e:fa:f2:f0", "network": {"id": "a3b98455-bec9-49f9-b508-a0338e3e56a1", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1274795432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a70e175710d40f1b889d65c5eaca043", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d88bb07-f93c-45ca-bce7-230cb1f33833", "external-id": "nsx-vlan-transportzone-387", "segmentation_id": 387, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1649a2d0-32", "ovs_interfaceid": "1649a2d0-32f2-4727-ac50-4c34c9435fea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 665.297262] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-ab2262d4-6f29-4cb0-8373-9f62a78d2f94 req-bbd7b640-af96-4fc6-a2ab-6b99dabf4cd6 service nova] Expecting reply to msg aedbe61a44fe47ad801237cfb7383c74 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 665.305734] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aedbe61a44fe47ad801237cfb7383c74 [ 665.306329] env[61649]: DEBUG oslo_concurrency.lockutils [req-ab2262d4-6f29-4cb0-8373-9f62a78d2f94 req-bbd7b640-af96-4fc6-a2ab-6b99dabf4cd6 service nova] Releasing lock "refresh_cache-bf8c692f-6510-4548-aedd-0e1792512e20" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 665.366465] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158122, 'name': CreateVM_Task, 'duration_secs': 0.282467} completed successfully. 
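
Task task-158122 (CreateVM_Task) goes from "progress is 0%" to "completed successfully" with duration_secs 0.282467 in the records above; oslo.vmware's wait_for_task produces those records by polling the Task managed object until it reaches a terminal state. A standalone sketch of that polling loop, with illustrative interval, timeout, and state names:

import time

def wait_for_task(get_task_info, interval=0.5, timeout=300):
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()  # one property-collector round trip
        if info["state"] == "success":
            return info.get("result")
        if info["state"] == "error":
            raise RuntimeError(info.get("error", "task failed"))
        print(f"Task {info['key']} progress is {info.get('progress', 0)}%.")
        time.sleep(interval)
    raise TimeoutError("task did not complete before the deadline")

# Fake task states standing in for the CreateVM_Task polling above:
states = iter([
    {"key": "task-158122", "state": "running", "progress": 0},
    {"key": "task-158122", "state": "success", "result": "vm-123"},
])
print(wait_for_task(lambda: next(states), interval=0.01))
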
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.366633] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Created VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 665.367364] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 665.367710] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 665.368049] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 665.368300] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7e88b7c1-2390-423b-9cdc-383e504e116c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.373018] env[61649]: DEBUG oslo_vmware.api [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Waiting for the task: (returnval){ [ 665.373018] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52ac5a10-3072-9a4a-058d-28bd2b37e9b6" [ 665.373018] env[61649]: _type = "Task" [ 665.373018] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.381073] env[61649]: DEBUG oslo_vmware.api [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52ac5a10-3072-9a4a-058d-28bd2b37e9b6, 'name': SearchDatastore_Task} progress is 0%. 
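
The lock and SearchDatastore_Task records around "[datastore1] devstack-image-cache_base/d1cd53dd-..." above trace the driver's cache-or-fetch flow: look for the image's VMDK in the per-datastore cache and only download on a miss. A simplified local-filesystem sketch of that shape; the paths and helpers are stand-ins for the actual datastore operations:

import os
import shutil
import tempfile

def fetch_image_if_missing(cache_dir, image_id, download):
    cached = os.path.join(cache_dir, image_id, f"{image_id}.vmdk")
    if os.path.exists(cached):  # the SearchDatastore_Task probe, roughly
        return cached
    os.makedirs(os.path.dirname(cached), exist_ok=True)
    # Download to a temp file first, then move it into place so a
    # half-written VMDK never appears under the cached name.
    with tempfile.NamedTemporaryFile(dir=os.path.dirname(cached),
                                     delete=False) as tmp:
        download(tmp)
    shutil.move(tmp.name, cached)
    return cached

cache = tempfile.mkdtemp()
print(fetch_image_if_missing(cache, "d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11",
                             lambda f: f.write(b"fake vmdk bytes")))
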
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.884351] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 665.884945] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Processing image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 665.885391] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 701.931179] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 702.929622] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 702.929862] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 702.930021] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 702.930197] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] CONF.reclaim_instance_interval <= 0, skipping... 
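
The run of "Running periodic task ComputeManager._..." records above comes from oslo.service's periodic task runner, and _reclaim_queued_deletes short-circuits because reclaim_instance_interval is not positive. A stdlib-only sketch of both behaviors (the config value and scheduling are illustrative):

import time

CONF = {"reclaim_instance_interval": 0}  # illustrative config

def _reclaim_queued_deletes():
    if CONF["reclaim_instance_interval"] <= 0:
        print("CONF.reclaim_instance_interval <= 0, skipping...")
        return
    # ...otherwise purge SOFT_DELETED instances older than the interval.

def run_periodic_tasks(tasks, spacing=0.0):
    for task in tasks:
        print(f"Running periodic task ComputeManager.{task.__name__}")
        task()
        time.sleep(spacing)  # the real runner schedules each task's next run

run_periodic_tasks([_reclaim_queued_deletes])
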
{{(pid=61649) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 702.930359] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 702.930714] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg bca683b7ac004262973a8ee8e8a8c8f9 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 702.940035] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bca683b7ac004262973a8ee8e8a8c8f9 [ 702.940736] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 702.940945] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 702.941145] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 702.941268] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61649) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 702.942512] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69a5f5ec-efb1-42c7-88dc-4fc48b0b572a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.951173] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-635e5e69-03b3-4665-b97b-8fbfc89eb8fa {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.965557] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c8e1905-7826-44c4-b374-497a288c1893 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.971389] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9c6067f-a533-4807-b91d-49504a0a1406 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.999489] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181745MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61649) _report_hypervisor_resource_view 
/opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 702.999638] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 702.999824] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 703.000632] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 326b09508ab44b7c910050b41c95dce2 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 703.039496] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 326b09508ab44b7c910050b41c95dce2 [ 703.043583] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 7df4c30b0e224c3cbd7c01e8346cab78 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 703.052537] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7df4c30b0e224c3cbd7c01e8346cab78 [ 703.073894] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 29281253-e489-48f5-b219-75ae984adb00 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 703.074051] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 2c9b8ba9-193e-468f-bc4e-006ab413b374 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 703.074183] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance aacbebf5-bd31-465b-b574-6c4a98b27f30 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 703.074308] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 7d93caea-4740-4bcd-9f06-9397d37a07b2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 703.074427] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance eb0c04e3-1234-445c-bfa6-e031dd0b89d3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 703.074547] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance fd0ac9db-adc2-46f2-93ff-0b7e299534a7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 703.074663] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 00931111-13a1-447d-a401-943221badd59 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 703.074776] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 99f9912a-edf0-40f5-a7ce-55767081705b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 703.074889] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 7f9f2074-6822-4d9d-9791-4bebc7e55862 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 703.075000] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance bf8c692f-6510-4548-aedd-0e1792512e20 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 703.075525] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 3dfe8708557c47db948111f4a5ea50ad in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 703.087940] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3dfe8708557c47db948111f4a5ea50ad [ 703.087940] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance e5fe92cf-e150-419f-a164-a98a9d24dd8c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 703.087940] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg cfa7f23682ce49d2a3c4c90824f1c826 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 703.099017] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cfa7f23682ce49d2a3c4c90824f1c826 [ 703.099736] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 95426048-d403-4dad-9ad7-b76de655a319 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 703.100239] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 31d64fe6fce24a03bdbd8f7e0d9d290c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 703.110283] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 31d64fe6fce24a03bdbd8f7e0d9d290c [ 703.110939] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance adc73db6-8bff-4007-ae74-528a37840d96 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 703.111402] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg d0a9ffd74f8a47e8b14ee6f6f8d9145d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 703.120521] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d0a9ffd74f8a47e8b14ee6f6f8d9145d [ 703.121159] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 788fc9bb-2f88-4f82-88cf-9c7a002edb47 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 703.121606] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg d1aef483dcfe4ea7940586fc31c14f3f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 703.130785] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d1aef483dcfe4ea7940586fc31c14f3f [ 703.131407] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 62d0a02d-88af-48f2-a14a-c9f2e899babe has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 703.131850] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 72b152a688a440fa85a3b3a7110ae776 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 703.140956] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 72b152a688a440fa85a3b3a7110ae776 [ 703.142968] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance c7fac9e7-0802-4f2e-a577-4ee50efa835a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 703.143417] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 5c2456f7a782498eacad8f7eb0b5eefe in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 703.154381] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5c2456f7a782498eacad8f7eb0b5eefe [ 703.154460] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance b9315087-b61c-488c-aaa9-5f4b4e2f12b4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 703.155044] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 50b86ab37c70424db91e302e6fd37cea in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 703.163909] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 50b86ab37c70424db91e302e6fd37cea [ 703.164569] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 3889663a-53e8-4d3e-bed6-5e86519522ef has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 703.165021] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 99fea967211d41d0a7fc302bef0100b0 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 703.174376] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 99fea967211d41d0a7fc302bef0100b0 [ 703.174996] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 833dbc2a-a434-4ca1-aa33-b48a910c0e91 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 703.175475] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg a651783d8e6e42dd8de130eb07962363 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 703.184650] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a651783d8e6e42dd8de130eb07962363 [ 703.185289] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 8819ddfb-4286-455b-8216-05e89424183e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 703.185731] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 7658c7ac38534cbda12871898f7c8445 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 703.194693] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7658c7ac38534cbda12871898f7c8445 [ 703.195361] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance fb407af6-66cb-4b3d-b630-d2b5a4b2c8d8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 703.195800] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 424bfe817bcd486e916935a188ff0ae1 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 703.208720] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 424bfe817bcd486e916935a188ff0ae1 [ 703.209401] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance fc905ad2-7f1d-4356-a8f7-1eda98cdd01d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 703.209846] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg b39b4f6c531a4812bcff309aa6776bf1 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 703.218907] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b39b4f6c531a4812bcff309aa6776bf1 [ 703.219558] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 4868f1fe-04d3-4055-bf61-8a46723cf573 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 703.219998] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg d821ce0b68ee45c6b99147577e230862 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 703.229418] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d821ce0b68ee45c6b99147577e230862 [ 703.230041] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance fffdaf35-4e17-40ba-95a7-cf34fa04737e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 703.230468] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg e465157256d945feadb3a0a4c8de37cb in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 703.239407] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e465157256d945feadb3a0a4c8de37cb [ 703.240017] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance bf8a0b66-22ef-4f1e-99a3-9727d4a61c02 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 703.240452] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 2dbe9753debc4579871ba468bb0c4a29 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 703.251435] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2dbe9753debc4579871ba468bb0c4a29 [ 703.252074] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 6126223b-c712-4260-a49b-7a56c4035e75 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 703.252515] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 3f1c3ca3c9b6472884b101c90e4136c5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 703.262624] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3f1c3ca3c9b6472884b101c90e4136c5 [ 703.263482] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance a0db1e96-4ca4-4fed-b86b-d8457f3570a9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 703.264137] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg c6951a532ae847529d4c37301e23d7c1 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 703.276537] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c6951a532ae847529d4c37301e23d7c1 [ 703.276537] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 9142a98b-6400-4cd2-b21f-29a435f95503 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 703.276537] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 9c422bf5f2ae42538d725281953e7bfc in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 703.285823] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9c422bf5f2ae42538d725281953e7bfc [ 703.285823] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 8999b9ee-ae7e-4438-80b7-dffdb3e92630 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 703.285823] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg cc3194e78ac049ca8122cc6d2ecdcf47 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 703.296555] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cc3194e78ac049ca8122cc6d2ecdcf47 [ 703.297270] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 018ab9c2-8c6d-4836-9e26-70ffc33b9b30 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 703.297724] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg fdf4d8c6c80d405f9e99c49c8a15873d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 703.307181] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fdf4d8c6c80d405f9e99c49c8a15873d [ 703.307836] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 93bf61a5-0737-4495-854d-14f1feebab86 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 703.308332] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg cb8e40d8b86f4a96845615ba46263adb in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 703.317448] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cb8e40d8b86f4a96845615ba46263adb [ 703.318079] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance f814cbd2-8d20-4a26-9bae-000a70a3e082 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 703.318517] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 0e30dca84f4a442daeff51ff015d39e0 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 703.329439] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0e30dca84f4a442daeff51ff015d39e0 [ 703.330082] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 0fc6dad2-0cde-46db-b840-3bd2737a91af has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 703.331185] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 72039b4d240e42a9bb12eaa952dec5f4 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 703.341699] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 72039b4d240e42a9bb12eaa952dec5f4 [ 703.341699] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance f09ee30f-7a9b-4c2e-a0b6-da2a711b9b4e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 703.341699] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg b5d3eec8ce144223aaf9b89f6bf6fa72 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 703.350159] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b5d3eec8ce144223aaf9b89f6bf6fa72 [ 703.350804] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 28428441-219c-4627-857e-ab8b91390c68 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 703.351245] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 8e485640c0ac4dd7aa6a13323a109f34 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 703.360361] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8e485640c0ac4dd7aa6a13323a109f34 [ 703.360993] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 5e9ab69f-856e-4b8d-808a-0799b87a9cc6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 703.361458] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg ca0c0f58e77141c1b9d5bf0f4408dca9 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 703.370202] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ca0c0f58e77141c1b9d5bf0f4408dca9 [ 703.370920] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance b176b001-3c32-439a-b6cd-9b608a0ac623 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 703.371490] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg cdb38322f1d74cf0820e8577b6c09a28 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 703.380791] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cdb38322f1d74cf0820e8577b6c09a28 [ 703.381446] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 95dad1e2-74d2-478f-8095-23a26770e27f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 703.381745] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 703.381957] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 703.830408] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dac3e21-30cf-47e6-83f0-b5dc65f433e0 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.838203] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f368eb8c-ee54-42eb-b4c5-3f92dc7bec29 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.866455] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16541265-ca0b-49c3-b005-ae5ae3df3d54 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.873584] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4605177-b43d-403c-a582-f17862c5e8e1 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.886602] env[61649]: DEBUG nova.compute.provider_tree [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 703.887043] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg db08ffe92caf436f94ff5adea1bb6475 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 703.894168] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg db08ffe92caf436f94ff5adea1bb6475 [ 703.895565] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 703.897803] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 2d5bb06bc44e456b92a1247987c1b1e0 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 703.907978] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2d5bb06bc44e456b92a1247987c1b1e0 [ 
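
The "Final resource view" record above (used_ram=1792MB, used_disk=10GB, used_vcpus=10) is consistent with simple sums over the ten "actively managed" instances listed earlier plus the 512 MB reserved in the inventory record:

instances = 10                                   # the "actively managed" records
per_instance = {"VCPU": 1, "MEMORY_MB": 128, "DISK_GB": 1}
reserved_mb = 512                                # MEMORY_MB "reserved" in inventory

used_vcpus = instances * per_instance["VCPU"]                       # 10
used_ram_mb = reserved_mb + instances * per_instance["MEMORY_MB"]   # 1792
used_disk_gb = instances * per_instance["DISK_GB"]                  # 10
print(used_vcpus, used_ram_mb, used_disk_gb)                        # 10 1792 10
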
703.908585] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61649) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 703.908756] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.909s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 704.903198] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 704.903471] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 704.929335] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 704.929505] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Starting heal instance info cache {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 704.929624] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Rebuilding the list of instances to heal {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 704.930261] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 3fb159871f734144841eef29f082205d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 704.949746] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3fb159871f734144841eef29f082205d [ 704.951967] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 29281253-e489-48f5-b219-75ae984adb00] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 704.952140] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 704.952277] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 704.952402] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] Skipping network cache update for instance because it is Building. 
{{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 704.952522] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 704.952641] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 704.952758] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 00931111-13a1-447d-a401-943221badd59] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 704.952874] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 704.952990] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 704.953104] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 704.953228] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Didn't find any instances for network info cache update. 
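
Note: every instance in the heal pass above is skipped because it is still Building, after which the task reports nothing to update. The selection step reduces to a filter on instance state; a rough sketch with simplified stand-in records (field names are illustrative, not Nova's object model):

    # Stand-in for ComputeManager._heal_instance_info_cache's list rebuild.
    instances = [
        {'uuid': '29281253-e489-48f5-b219-75ae984adb00', 'vm_state': 'building'},
        {'uuid': '7d93caea-4740-4bcd-9f06-9397d37a07b2', 'vm_state': 'building'},
    ]

    def instances_to_heal(instances):
        healable = []
        for inst in instances:
            if inst['vm_state'] == 'building':
                # Mirrors "Skipping network cache update ... it is Building."
                continue
            healable.append(inst)
        return healable

    if not instances_to_heal(instances):
        print("Didn't find any instances for network info cache update.")
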
{{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 704.953684] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 709.867058] env[61649]: WARNING oslo_vmware.rw_handles [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 709.867058] env[61649]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 709.867058] env[61649]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 709.867058] env[61649]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 709.867058] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 709.867058] env[61649]: ERROR oslo_vmware.rw_handles response.begin() [ 709.867058] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 709.867058] env[61649]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 709.867058] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 709.867058] env[61649]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 709.867058] env[61649]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 709.867058] env[61649]: ERROR oslo_vmware.rw_handles [ 709.867818] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] Downloaded image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to vmware_temp/7f80dc92-07ec-4811-8da4-1f1bd5d2382d/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 709.869953] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] Caching image {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 709.869953] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Copying Virtual Disk [datastore1] vmware_temp/7f80dc92-07ec-4811-8da4-1f1bd5d2382d/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk to [datastore1] vmware_temp/7f80dc92-07ec-4811-8da4-1f1bd5d2382d/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk {{(pid=61649) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 709.869953] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9512f38f-229f-41f8-9c19-4f158da978b4 {{(pid=61649) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.878389] env[61649]: DEBUG oslo_vmware.api [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Waiting for the task: (returnval){ [ 709.878389] env[61649]: value = "task-158123" [ 709.878389] env[61649]: _type = "Task" [ 709.878389] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.885922] env[61649]: DEBUG oslo_vmware.api [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Task: {'id': task-158123, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.388717] env[61649]: DEBUG oslo_vmware.exceptions [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Fault InvalidArgument not matched. {{(pid=61649) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 710.389005] env[61649]: DEBUG oslo_concurrency.lockutils [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 710.389570] env[61649]: ERROR nova.compute.manager [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 710.389570] env[61649]: Faults: ['InvalidArgument'] [ 710.389570] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] Traceback (most recent call last): [ 710.389570] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 710.389570] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] yield resources [ 710.389570] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 710.389570] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] self.driver.spawn(context, instance, image_meta, [ 710.389570] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 710.389570] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 710.389570] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 710.389570] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] 
self._fetch_image_if_missing(context, vi) [ 710.389570] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 710.389905] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] image_cache(vi, tmp_image_ds_loc) [ 710.389905] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 710.389905] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] vm_util.copy_virtual_disk( [ 710.389905] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 710.389905] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] session._wait_for_task(vmdk_copy_task) [ 710.389905] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 710.389905] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] return self.wait_for_task(task_ref) [ 710.389905] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 710.389905] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] return evt.wait() [ 710.389905] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 710.389905] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] result = hub.switch() [ 710.389905] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 710.389905] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] return self.greenlet.switch() [ 710.390227] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 710.390227] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] self.f(*self.args, **self.kw) [ 710.390227] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 710.390227] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] raise exceptions.translate_fault(task_info.error) [ 710.390227] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 710.390227] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] Faults: ['InvalidArgument'] [ 710.390227] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] [ 710.390227] env[61649]: INFO nova.compute.manager [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 
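
Note: the traceback bottoms out in oslo.vmware's task poller — wait_for_task() parks the greenthread while a looping call polls the vSphere task, and once the task reports an error state the fault is translated and re-raised, surfacing here as VimFaultException ("A specified parameter was not correct: fileType"). Reduced to a synchronous sketch, with a hypothetical poll() callable standing in for reading TaskInfo:

    import time

    class TaskFault(Exception):
        """Stand-in for oslo_vmware.exceptions.VimFaultException."""

    def wait_for_task(poll, interval=0.5, timeout=300.0):
        # poll() returns (state, error); states mimic vSphere TaskInfo.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state, error = poll()
            if state == 'success':
                return
            if state == 'error':
                # The fault-translation step: this is where the
                # InvalidArgument fault above re-emerges in the caller.
                raise TaskFault(error)
            time.sleep(interval)   # eventlet yields here in the real code
        raise TimeoutError('task did not complete')
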
tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] Terminating instance [ 710.391427] env[61649]: DEBUG oslo_concurrency.lockutils [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 710.391633] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 710.391870] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bb36a82c-ada6-404c-8d4f-005bfe1647c3 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.393917] env[61649]: DEBUG oslo_concurrency.lockutils [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Acquiring lock "refresh_cache-7d93caea-4740-4bcd-9f06-9397d37a07b2" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 710.394078] env[61649]: DEBUG oslo_concurrency.lockutils [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Acquired lock "refresh_cache-7d93caea-4740-4bcd-9f06-9397d37a07b2" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 710.394248] env[61649]: DEBUG nova.network.neutron [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] Building network info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 710.394654] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Expecting reply to msg 22fedca5cf1d409f917d6d8d0adab737 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 710.401364] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 710.401696] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61649) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 710.402223] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-88613020-f625-466d-95b4-dbb88dd3df64 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.405495] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 22fedca5cf1d409f917d6d8d0adab737 [ 710.409226] env[61649]: DEBUG oslo_vmware.api [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Waiting for the task: (returnval){ [ 710.409226] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]528d655c-157d-9cf6-f99f-e1c366bcfc72" [ 710.409226] env[61649]: _type = "Task" [ 710.409226] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.417502] env[61649]: DEBUG oslo_vmware.api [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]528d655c-157d-9cf6-f99f-e1c366bcfc72, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.428618] env[61649]: DEBUG nova.network.neutron [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] Instance cache missing network info. {{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 710.483017] env[61649]: DEBUG nova.network.neutron [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 710.483577] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Expecting reply to msg b93a90e1e3134dfa8125651ba0c9bbee in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 710.491437] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b93a90e1e3134dfa8125651ba0c9bbee [ 710.492026] env[61649]: DEBUG oslo_concurrency.lockutils [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Releasing lock "refresh_cache-7d93caea-4740-4bcd-9f06-9397d37a07b2" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 710.492468] env[61649]: DEBUG nova.compute.manager [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] Start destroying the instance on the hypervisor. 
{{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 710.492725] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 710.493884] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96b01695-416c-4367-a05e-b2881f0c3f40 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.501644] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] Unregistering the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 710.501919] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-79be36f8-a619-4c75-8998-a9dbdd2bc2c9 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.530444] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] Unregistered the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 710.530671] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] Deleting contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 710.530854] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Deleting the datastore file [datastore1] 7d93caea-4740-4bcd-9f06-9397d37a07b2 {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 710.531106] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-71919c09-6247-478d-bbd8-d10e762d5948 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.537121] env[61649]: DEBUG oslo_vmware.api [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Waiting for the task: (returnval){ [ 710.537121] env[61649]: value = "task-158125" [ 710.537121] env[61649]: _type = "Task" [ 710.537121] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.544762] env[61649]: DEBUG oslo_vmware.api [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Task: {'id': task-158125, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.927250] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] Preparing fetch location {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 710.927250] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Creating directory with path [datastore1] vmware_temp/ef5c1149-1ba6-4cce-9f01-89c958ac53fc/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 710.927250] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f84549af-95c0-4f05-bbb2-4f9955fb6a8b {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.940142] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Created directory with path [datastore1] vmware_temp/ef5c1149-1ba6-4cce-9f01-89c958ac53fc/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 710.940352] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] Fetch image to [datastore1] vmware_temp/ef5c1149-1ba6-4cce-9f01-89c958ac53fc/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 710.940524] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to [datastore1] vmware_temp/ef5c1149-1ba6-4cce-9f01-89c958ac53fc/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 710.941250] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68eb45da-519b-44f7-94f7-a131d04f5d74 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.949153] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7ee639a-6171-4b09-8459-f860250a8601 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.957981] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a555b12-a4ce-45cc-acfb-6e6e9779b20a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.989086] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3ee9de1-ac0a-4005-a11f-695d2da30682 
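
Note: the fetch-location lines show the two-step image cache layout on the datastore — the image is first downloaded to a per-request scratch directory, then copied into the shared cache folder under its image id. Hypothetical helpers reproducing the path shapes seen in the log (the real strings come from Nova's ds_util/vmops):

    import uuid

    IMAGE_ID = 'd1cd53dd-702d-47cc-aaaf-dcf09a8c9d11'   # image id from the log

    def temp_fetch_path(datastore, image_id):
        # "[datastore1] vmware_temp/<request uuid>/<image id>/tmp-sparse.vmdk"
        return f"[{datastore}] vmware_temp/{uuid.uuid4()}/{image_id}/tmp-sparse.vmdk"

    def cached_image_path(datastore, image_id, cache='devstack-image-cache_base'):
        # Destination of the CopyVirtualDisk_Task seen earlier.
        return f"[{datastore}] {cache}/{image_id}/{image_id}.vmdk"

    print(temp_fetch_path('datastore1', IMAGE_ID))
    print(cached_image_path('datastore1', IMAGE_ID))

The per-request uuid keeps concurrent fetches of the same image from clobbering each other before the copy into the shared cache path.
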
{{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.995001] env[61649]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-586aea85-c2a6-4283-a1cd-292bb7fabafb {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.017895] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 711.048914] env[61649]: DEBUG oslo_vmware.api [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Task: {'id': task-158125, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.041014} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.049253] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Deleted the datastore file {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 711.049372] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] Deleted contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 711.049580] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 711.049706] env[61649]: INFO nova.compute.manager [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] Took 0.56 seconds to destroy the instance on the hypervisor. [ 711.049941] env[61649]: DEBUG oslo.service.loopingcall [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 711.050149] env[61649]: DEBUG nova.compute.manager [-] [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] Skipping network deallocation for instance since networking was not requested. 
{{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 711.053460] env[61649]: DEBUG nova.compute.claims [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] Aborting claim: {{(pid=61649) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 711.053887] env[61649]: DEBUG oslo_concurrency.lockutils [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 711.054122] env[61649]: DEBUG oslo_concurrency.lockutils [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 711.057026] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Expecting reply to msg a42dbe60687f4276ae2136ed594eb222 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 711.089581] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a42dbe60687f4276ae2136ed594eb222 [ 711.184364] env[61649]: DEBUG oslo_vmware.rw_handles [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ef5c1149-1ba6-4cce-9f01-89c958ac53fc/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 711.245804] env[61649]: DEBUG oslo_vmware.rw_handles [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Completed reading data from the image iterator. {{(pid=61649) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 711.245804] env[61649]: DEBUG oslo_vmware.rw_handles [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ef5c1149-1ba6-4cce-9f01-89c958ac53fc/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
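
Note: the rw_handles lines stream the 21318656-byte image straight to the ESX host's /folder endpoint over HTTPS, with the datacenter and datastore passed as query parameters; closing the write handle reads back the response, which is exactly where the earlier RemoteDisconnected was raised. A bare-bones sketch of that upload pattern using only the standard library (host and path copied from the log; the ticket cookie is an assumption modeled on the AcquireGenericServiceTicket call above):

    import http.client
    import ssl

    HOST = 'esx7c2n1.openstack.eu-de-1.cloud.sap'
    PATH = ('/folder/vmware_temp/ef5c1149-1ba6-4cce-9f01-89c958ac53fc/'
            'd1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk'
            '?dcPath=ha-datacenter&dsName=datastore1')

    def upload(data: bytes, ticket_cookie: str) -> int:
        ctx = ssl.create_default_context()   # lab hosts may need custom CAs
        conn = http.client.HTTPSConnection(HOST, 443, context=ctx)
        conn.request('PUT', PATH, body=data,
                     headers={'Content-Length': str(len(data)),
                              'Cookie': ticket_cookie})
        try:
            # Reading the response on close is what raised
            # RemoteDisconnected in the earlier traceback.
            return conn.getresponse().status
        finally:
            conn.close()
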
{{(pid=61649) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 711.583260] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09b8ad15-5409-4fef-a58f-f172ded93f6d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.591176] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-775d53d8-7c81-4fbe-a30a-a8f6b764aaeb {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.620986] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a968215-97ae-4c65-a05a-4d79fe8356b8 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.628865] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05cf06bf-659a-43b1-891b-cfc8c1826a44 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.641740] env[61649]: DEBUG nova.compute.provider_tree [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 711.642224] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Expecting reply to msg d2760e2554d1409db64cf2c92707940f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 711.649281] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d2760e2554d1409db64cf2c92707940f [ 711.650296] env[61649]: DEBUG nova.scheduler.client.report [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 711.652638] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Expecting reply to msg 3636031328cd4dd6b31259174a21426e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 711.663436] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3636031328cd4dd6b31259174a21426e [ 711.664151] env[61649]: DEBUG oslo_concurrency.lockutils [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.610s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 711.664714] env[61649]: ERROR nova.compute.manager [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 711.664714] env[61649]: Faults: ['InvalidArgument'] [ 711.664714] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] Traceback (most recent call last): [ 711.664714] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 711.664714] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] self.driver.spawn(context, instance, image_meta, [ 711.664714] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 711.664714] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 711.664714] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 711.664714] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] self._fetch_image_if_missing(context, vi) [ 711.664714] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 711.664714] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] image_cache(vi, tmp_image_ds_loc) [ 711.664714] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 711.665071] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] vm_util.copy_virtual_disk( [ 711.665071] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 711.665071] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] session._wait_for_task(vmdk_copy_task) [ 711.665071] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 711.665071] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] return self.wait_for_task(task_ref) [ 711.665071] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 711.665071] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] return evt.wait() [ 711.665071] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 711.665071] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] result = hub.switch() [ 711.665071] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 711.665071] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] return self.greenlet.switch() [ 711.665071] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 711.665071] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] self.f(*self.args, **self.kw) [ 711.665423] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 711.665423] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] raise exceptions.translate_fault(task_info.error) [ 711.665423] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 711.665423] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] Faults: ['InvalidArgument'] [ 711.665423] env[61649]: ERROR nova.compute.manager [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] [ 711.665423] env[61649]: DEBUG nova.compute.utils [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] VimFaultException {{(pid=61649) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 711.666718] env[61649]: DEBUG nova.compute.manager [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] Build of instance 7d93caea-4740-4bcd-9f06-9397d37a07b2 was re-scheduled: A specified parameter was not correct: fileType [ 711.666718] env[61649]: Faults: ['InvalidArgument'] {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 711.667090] env[61649]: DEBUG nova.compute.manager [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] Unplugging VIFs for instance {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 711.667320] env[61649]: DEBUG oslo_concurrency.lockutils [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Acquiring lock "refresh_cache-7d93caea-4740-4bcd-9f06-9397d37a07b2" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 711.667461] env[61649]: DEBUG oslo_concurrency.lockutils [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Acquired lock 
"refresh_cache-7d93caea-4740-4bcd-9f06-9397d37a07b2" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 711.667613] env[61649]: DEBUG nova.network.neutron [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] Building network info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 711.667980] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Expecting reply to msg e3f92fc29f474126a276fb26ee67221e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 711.674373] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e3f92fc29f474126a276fb26ee67221e [ 711.696858] env[61649]: DEBUG nova.network.neutron [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] Instance cache missing network info. {{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 711.768531] env[61649]: DEBUG nova.network.neutron [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 711.769116] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Expecting reply to msg c5f3898943b14d23a8983521484ce163 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 711.778133] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c5f3898943b14d23a8983521484ce163 [ 711.778771] env[61649]: DEBUG oslo_concurrency.lockutils [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Releasing lock "refresh_cache-7d93caea-4740-4bcd-9f06-9397d37a07b2" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 711.778995] env[61649]: DEBUG nova.compute.manager [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 711.779195] env[61649]: DEBUG nova.compute.manager [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] [instance: 7d93caea-4740-4bcd-9f06-9397d37a07b2] Skipping network deallocation for instance since networking was not requested. 
{{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 711.782224] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Expecting reply to msg 8cc11947dfe049328d2fc77aeab387c8 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 711.815472] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8cc11947dfe049328d2fc77aeab387c8 [ 711.817971] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Expecting reply to msg 6ef15d17b4e7466bbd46e597429f7c5b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 711.850548] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6ef15d17b4e7466bbd46e597429f7c5b [ 711.874192] env[61649]: INFO nova.scheduler.client.report [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Deleted allocations for instance 7d93caea-4740-4bcd-9f06-9397d37a07b2 [ 711.879969] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Expecting reply to msg 038509f548a44d8cab649688e5d9ca52 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 711.892034] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 038509f548a44d8cab649688e5d9ca52 [ 711.892387] env[61649]: DEBUG oslo_concurrency.lockutils [None req-34986cd9-2b3a-4844-9ce9-49ad49535d58 tempest-ServerDiagnosticsV248Test-603418949 tempest-ServerDiagnosticsV248Test-603418949-project-member] Lock "7d93caea-4740-4bcd-9f06-9397d37a07b2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 140.007s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 711.892904] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Expecting reply to msg 98620f57f1e9487595d9915f07a39dfe in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 711.925893] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 98620f57f1e9487595d9915f07a39dfe [ 711.926442] env[61649]: DEBUG nova.compute.manager [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Starting instance... 
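
Note: the lockutils lines throughout this section come in "acquired :: waited Ns" / "released :: held Ns" pairs, and the 140.007s hold above is the per-instance build lock spanning the entire failed build plus the reschedule bookkeeping. The reporting pattern is easy to picture as a context manager (a sketch of the idea, not oslo.concurrency's implementation):

    import time
    from contextlib import contextmanager
    from threading import Lock

    _locks = {}

    @contextmanager
    def timed_lock(name):
        lock = _locks.setdefault(name, Lock())
        t0 = time.monotonic()
        lock.acquire()
        acquired = time.monotonic()
        print(f'Lock "{name}" acquired :: waited {acquired - t0:.3f}s')
        try:
            yield
        finally:
            lock.release()
            print(f'Lock "{name}" released :: held '
                  f'{time.monotonic() - acquired:.3f}s')

    with timed_lock('compute_resources'):
        pass   # critical section, e.g. a resource-tracker update
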
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 711.928121] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Expecting reply to msg c66f75c735a94ca19f6e48dc5807892b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 711.960591] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c66f75c735a94ca19f6e48dc5807892b [ 711.975826] env[61649]: DEBUG oslo_concurrency.lockutils [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 711.976118] env[61649]: DEBUG oslo_concurrency.lockutils [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 711.977578] env[61649]: INFO nova.compute.claims [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 711.979156] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Expecting reply to msg 9bad8d6996c34b3c864d111fea6cb639 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 712.009358] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9bad8d6996c34b3c864d111fea6cb639 [ 712.011565] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Expecting reply to msg cb7c71804e3a4beea197922ec331103c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 712.018674] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cb7c71804e3a4beea197922ec331103c [ 712.447658] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c650d59c-75d3-41aa-841f-622b64f6b44d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.455929] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ef99e2c-d804-4cf5-a912-dde7ce696b5c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.485546] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb8931e2-8759-4d6f-99f2-f8ff657ac444 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.492743] env[61649]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09409c0d-2db0-44c8-b1dc-a6e4aca16f4b {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.505854] env[61649]: DEBUG nova.compute.provider_tree [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 712.506363] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Expecting reply to msg fa0320cfda904324b6cfd5bfe43d2819 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 712.513481] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fa0320cfda904324b6cfd5bfe43d2819 [ 712.514787] env[61649]: DEBUG nova.scheduler.client.report [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 712.516821] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Expecting reply to msg 9a28ae87375a4c11ad020afe83d995a3 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 712.530759] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9a28ae87375a4c11ad020afe83d995a3 [ 712.530759] env[61649]: DEBUG oslo_concurrency.lockutils [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.554s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 712.530991] env[61649]: DEBUG nova.compute.manager [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Start building networks asynchronously for instance. 
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 712.532701] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Expecting reply to msg 1c380376d095452caf94d3dc559c6bde in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 712.564709] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1c380376d095452caf94d3dc559c6bde [ 712.566328] env[61649]: DEBUG nova.compute.utils [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Using /dev/sd instead of None {{(pid=61649) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 712.566930] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Expecting reply to msg 56f6a51aca6442559a711f5eda0c4b0b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 712.567927] env[61649]: DEBUG nova.compute.manager [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Allocating IP information in the background. {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 712.568082] env[61649]: DEBUG nova.network.neutron [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] allocate_for_instance() {{(pid=61649) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 712.577703] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 56f6a51aca6442559a711f5eda0c4b0b [ 712.578357] env[61649]: DEBUG nova.compute.manager [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Start building block device mappings for instance. 
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 712.580090] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Expecting reply to msg 12a5c473069a4ed482b391a99fcc8775 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 712.610314] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 12a5c473069a4ed482b391a99fcc8775 [ 712.612991] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Expecting reply to msg 0cd28ebae0034839bf7131ef7aaa2a0e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 712.629550] env[61649]: DEBUG nova.policy [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e052990345b74fe189a14d94f8c9b734', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '699aeda85119429e8454dfc82da1a98d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61649) authorize /opt/stack/nova/nova/policy.py:203}} [ 712.646754] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0cd28ebae0034839bf7131ef7aaa2a0e [ 712.647888] env[61649]: DEBUG nova.compute.manager [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Start spawning the instance on the hypervisor. 
{{(pid=61649) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 712.668250] env[61649]: DEBUG nova.virt.hardware [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-04-02T10:03:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-04-02T10:03:42Z,direct_url=,disk_format='vmdk',id=d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d9c1bd4c77004c3cb8e42232cad1896c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-04-02T10:03:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 712.668510] env[61649]: DEBUG nova.virt.hardware [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Flavor limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 712.668664] env[61649]: DEBUG nova.virt.hardware [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Image limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 712.668850] env[61649]: DEBUG nova.virt.hardware [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Flavor pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 712.671751] env[61649]: DEBUG nova.virt.hardware [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Image pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 712.671751] env[61649]: DEBUG nova.virt.hardware [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 712.671751] env[61649]: DEBUG nova.virt.hardware [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 712.671751] env[61649]: DEBUG nova.virt.hardware [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61649) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 712.671751] env[61649]: DEBUG nova.virt.hardware [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Got 1 possible topologies {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 712.671941] env[61649]: DEBUG nova.virt.hardware [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 712.671941] env[61649]: DEBUG nova.virt.hardware [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 712.671941] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8de69a6c-71d3-459b-b2e8-4fcee4d892a3 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.679110] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-874b5ef5-818e-46ab-8bbe-42190bb7e3df {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.114888] env[61649]: DEBUG nova.network.neutron [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Successfully created port: e5254fca-ad6f-4768-9eb0-89b67e6c499f {{(pid=61649) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 713.996833] env[61649]: DEBUG nova.network.neutron [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Successfully updated port: e5254fca-ad6f-4768-9eb0-89b67e6c499f {{(pid=61649) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 713.997343] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Expecting reply to msg 1983ba02c09a4352ae4a899dcbd56dfe in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 714.005622] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1983ba02c09a4352ae4a899dcbd56dfe [ 714.006279] env[61649]: DEBUG oslo_concurrency.lockutils [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Acquiring lock "refresh_cache-e5fe92cf-e150-419f-a164-a98a9d24dd8c" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 714.006409] env[61649]: DEBUG oslo_concurrency.lockutils [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Acquired lock 
"refresh_cache-e5fe92cf-e150-419f-a164-a98a9d24dd8c" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 714.006559] env[61649]: DEBUG nova.network.neutron [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Building network info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 714.006934] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Expecting reply to msg 53ffcfdfa32a436b888b0dc36c7dda9c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 714.014491] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 53ffcfdfa32a436b888b0dc36c7dda9c [ 714.058043] env[61649]: DEBUG nova.network.neutron [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Instance cache missing network info. {{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 714.174550] env[61649]: DEBUG nova.compute.manager [req-b68dc7f1-4084-4adb-914d-9a05d78c1036 req-036d2e6a-3be2-48b4-9fb8-2ef9a3835736 service nova] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Received event network-vif-plugged-e5254fca-ad6f-4768-9eb0-89b67e6c499f {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 714.174789] env[61649]: DEBUG oslo_concurrency.lockutils [req-b68dc7f1-4084-4adb-914d-9a05d78c1036 req-036d2e6a-3be2-48b4-9fb8-2ef9a3835736 service nova] Acquiring lock "e5fe92cf-e150-419f-a164-a98a9d24dd8c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 714.174980] env[61649]: DEBUG oslo_concurrency.lockutils [req-b68dc7f1-4084-4adb-914d-9a05d78c1036 req-036d2e6a-3be2-48b4-9fb8-2ef9a3835736 service nova] Lock "e5fe92cf-e150-419f-a164-a98a9d24dd8c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 714.175147] env[61649]: DEBUG oslo_concurrency.lockutils [req-b68dc7f1-4084-4adb-914d-9a05d78c1036 req-036d2e6a-3be2-48b4-9fb8-2ef9a3835736 service nova] Lock "e5fe92cf-e150-419f-a164-a98a9d24dd8c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 714.175313] env[61649]: DEBUG nova.compute.manager [req-b68dc7f1-4084-4adb-914d-9a05d78c1036 req-036d2e6a-3be2-48b4-9fb8-2ef9a3835736 service nova] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] No waiting events found dispatching network-vif-plugged-e5254fca-ad6f-4768-9eb0-89b67e6c499f {{(pid=61649) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 714.175474] env[61649]: WARNING nova.compute.manager [req-b68dc7f1-4084-4adb-914d-9a05d78c1036 req-036d2e6a-3be2-48b4-9fb8-2ef9a3835736 service nova] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Received unexpected 
event network-vif-plugged-e5254fca-ad6f-4768-9eb0-89b67e6c499f for instance with vm_state building and task_state spawning. [ 714.258786] env[61649]: DEBUG nova.network.neutron [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Updating instance_info_cache with network_info: [{"id": "e5254fca-ad6f-4768-9eb0-89b67e6c499f", "address": "fa:16:3e:d9:c8:04", "network": {"id": "b96f19dc-5248-4d43-8f39-ea3131aaf6db", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.235", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d9c1bd4c77004c3cb8e42232cad1896c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a8c8175-1197-4f12-baac-ef6aba95f585", "external-id": "nsx-vlan-transportzone-832", "segmentation_id": 832, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape5254fca-ad", "ovs_interfaceid": "e5254fca-ad6f-4768-9eb0-89b67e6c499f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 714.259436] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Expecting reply to msg 089ec610b6dd491eb7b74a178a1c146c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 714.269347] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 089ec610b6dd491eb7b74a178a1c146c [ 714.269944] env[61649]: DEBUG oslo_concurrency.lockutils [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Releasing lock "refresh_cache-e5fe92cf-e150-419f-a164-a98a9d24dd8c" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 714.270265] env[61649]: DEBUG nova.compute.manager [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Instance network_info: |[{"id": "e5254fca-ad6f-4768-9eb0-89b67e6c499f", "address": "fa:16:3e:d9:c8:04", "network": {"id": "b96f19dc-5248-4d43-8f39-ea3131aaf6db", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.235", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d9c1bd4c77004c3cb8e42232cad1896c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"1a8c8175-1197-4f12-baac-ef6aba95f585", "external-id": "nsx-vlan-transportzone-832", "segmentation_id": 832, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape5254fca-ad", "ovs_interfaceid": "e5254fca-ad6f-4768-9eb0-89b67e6c499f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 714.270674] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d9:c8:04', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1a8c8175-1197-4f12-baac-ef6aba95f585', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e5254fca-ad6f-4768-9eb0-89b67e6c499f', 'vif_model': 'vmxnet3'}] {{(pid=61649) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 714.281269] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Creating folder: Project (699aeda85119429e8454dfc82da1a98d). Parent ref: group-v51588. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 714.282287] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8f4a0c92-2538-4186-b3a6-e0211e67cd42 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.292505] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Created folder: Project (699aeda85119429e8454dfc82da1a98d) in parent group-v51588. [ 714.292698] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Creating folder: Instances. Parent ref: group-v51623. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 714.292914] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ce786b1a-8cd6-4481-93c8-d0331ccdbfe3 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.303284] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Created folder: Instances in parent group-v51623. [ 714.303504] env[61649]: DEBUG oslo.service.loopingcall [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 714.303681] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Creating VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 714.303868] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7997177b-f330-4d5f-9b60-b9c9548d6155 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.322768] env[61649]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 714.322768] env[61649]: value = "task-158128" [ 714.322768] env[61649]: _type = "Task" [ 714.322768] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.330757] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158128, 'name': CreateVM_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.832557] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158128, 'name': CreateVM_Task, 'duration_secs': 0.291118} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.832738] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Created VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 714.833408] env[61649]: DEBUG oslo_concurrency.lockutils [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 714.833576] env[61649]: DEBUG oslo_concurrency.lockutils [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 714.833895] env[61649]: DEBUG oslo_concurrency.lockutils [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 714.834143] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec540b4d-df9a-44b4-bfd7-13df6f6df740 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.838489] env[61649]: DEBUG oslo_vmware.api [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Waiting for the task: (returnval){ [ 714.838489] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]526ebce2-50fa-7ba3-1fd0-ed1956c73aa5" [ 714.838489] env[61649]: _type = "Task" [ 714.838489] env[61649]: } to complete. 
{{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.846001] env[61649]: DEBUG oslo_vmware.api [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]526ebce2-50fa-7ba3-1fd0-ed1956c73aa5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.353664] env[61649]: DEBUG oslo_concurrency.lockutils [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 715.353937] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Processing image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 715.354284] env[61649]: DEBUG oslo_concurrency.lockutils [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 716.455720] env[61649]: DEBUG nova.compute.manager [req-3b50eb21-3664-45ba-97e4-877b317a5a59 req-e45a631a-e048-45e1-bdc7-da1317e1c8fc service nova] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Received event network-changed-e5254fca-ad6f-4768-9eb0-89b67e6c499f {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 716.455953] env[61649]: DEBUG nova.compute.manager [req-3b50eb21-3664-45ba-97e4-877b317a5a59 req-e45a631a-e048-45e1-bdc7-da1317e1c8fc service nova] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Refreshing instance network info cache due to event network-changed-e5254fca-ad6f-4768-9eb0-89b67e6c499f. 
{{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 716.456146] env[61649]: DEBUG oslo_concurrency.lockutils [req-3b50eb21-3664-45ba-97e4-877b317a5a59 req-e45a631a-e048-45e1-bdc7-da1317e1c8fc service nova] Acquiring lock "refresh_cache-e5fe92cf-e150-419f-a164-a98a9d24dd8c" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 716.456285] env[61649]: DEBUG oslo_concurrency.lockutils [req-3b50eb21-3664-45ba-97e4-877b317a5a59 req-e45a631a-e048-45e1-bdc7-da1317e1c8fc service nova] Acquired lock "refresh_cache-e5fe92cf-e150-419f-a164-a98a9d24dd8c" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 716.456445] env[61649]: DEBUG nova.network.neutron [req-3b50eb21-3664-45ba-97e4-877b317a5a59 req-e45a631a-e048-45e1-bdc7-da1317e1c8fc service nova] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Refreshing network info cache for port e5254fca-ad6f-4768-9eb0-89b67e6c499f {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 716.456923] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-3b50eb21-3664-45ba-97e4-877b317a5a59 req-e45a631a-e048-45e1-bdc7-da1317e1c8fc service nova] Expecting reply to msg 8239dbf8e9204f06a6b910dd8995e63f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 716.464029] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8239dbf8e9204f06a6b910dd8995e63f [ 717.190065] env[61649]: DEBUG nova.network.neutron [req-3b50eb21-3664-45ba-97e4-877b317a5a59 req-e45a631a-e048-45e1-bdc7-da1317e1c8fc service nova] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Updated VIF entry in instance network info cache for port e5254fca-ad6f-4768-9eb0-89b67e6c499f. 
{{(pid=61649) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 717.190065] env[61649]: DEBUG nova.network.neutron [req-3b50eb21-3664-45ba-97e4-877b317a5a59 req-e45a631a-e048-45e1-bdc7-da1317e1c8fc service nova] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Updating instance_info_cache with network_info: [{"id": "e5254fca-ad6f-4768-9eb0-89b67e6c499f", "address": "fa:16:3e:d9:c8:04", "network": {"id": "b96f19dc-5248-4d43-8f39-ea3131aaf6db", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.235", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d9c1bd4c77004c3cb8e42232cad1896c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a8c8175-1197-4f12-baac-ef6aba95f585", "external-id": "nsx-vlan-transportzone-832", "segmentation_id": 832, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape5254fca-ad", "ovs_interfaceid": "e5254fca-ad6f-4768-9eb0-89b67e6c499f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 717.190289] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-3b50eb21-3664-45ba-97e4-877b317a5a59 req-e45a631a-e048-45e1-bdc7-da1317e1c8fc service nova] Expecting reply to msg e8a0c870d2204e71b681e2a3f1c40fb6 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 717.197668] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e8a0c870d2204e71b681e2a3f1c40fb6 [ 717.198359] env[61649]: DEBUG oslo_concurrency.lockutils [req-3b50eb21-3664-45ba-97e4-877b317a5a59 req-e45a631a-e048-45e1-bdc7-da1317e1c8fc service nova] Releasing lock "refresh_cache-e5fe92cf-e150-419f-a164-a98a9d24dd8c" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 719.604687] env[61649]: DEBUG oslo_concurrency.lockutils [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Acquiring lock "29f84900-0805-4ab2-af4d-bd7be2ac94d3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 719.604922] env[61649]: DEBUG oslo_concurrency.lockutils [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Lock "29f84900-0805-4ab2-af4d-bd7be2ac94d3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 760.690605] env[61649]: WARNING oslo_vmware.rw_handles [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end 
closed connection without response [ 760.690605] env[61649]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 760.690605] env[61649]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 760.690605] env[61649]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 760.690605] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 760.690605] env[61649]: ERROR oslo_vmware.rw_handles response.begin() [ 760.690605] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 760.690605] env[61649]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 760.690605] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 760.690605] env[61649]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 760.690605] env[61649]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 760.690605] env[61649]: ERROR oslo_vmware.rw_handles [ 760.691231] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] Downloaded image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to vmware_temp/ef5c1149-1ba6-4cce-9f01-89c958ac53fc/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 760.692851] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] Caching image {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 760.694119] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Copying Virtual Disk [datastore1] vmware_temp/ef5c1149-1ba6-4cce-9f01-89c958ac53fc/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk to [datastore1] vmware_temp/ef5c1149-1ba6-4cce-9f01-89c958ac53fc/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk {{(pid=61649) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 760.694119] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-43db151e-c2d4-4d86-b249-6c563d5b0863 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.703008] env[61649]: DEBUG oslo_vmware.api [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Waiting for the task: (returnval){ [ 760.703008] env[61649]: value = "task-158129" [ 760.703008] env[61649]: _type = "Task" [ 760.703008] env[61649]: } to complete. 
{{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.712703] env[61649]: DEBUG oslo_vmware.api [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Task: {'id': task-158129, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.213653] env[61649]: DEBUG oslo_vmware.exceptions [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Fault InvalidArgument not matched. {{(pid=61649) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 761.213992] env[61649]: DEBUG oslo_concurrency.lockutils [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 761.214539] env[61649]: ERROR nova.compute.manager [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 761.214539] env[61649]: Faults: ['InvalidArgument'] [ 761.214539] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] Traceback (most recent call last): [ 761.214539] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 761.214539] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] yield resources [ 761.214539] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 761.214539] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] self.driver.spawn(context, instance, image_meta, [ 761.214539] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 761.214539] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] self._vmops.spawn(context, instance, image_meta, injected_files, [ 761.214539] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 761.214539] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] self._fetch_image_if_missing(context, vi) [ 761.214539] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 761.214869] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] image_cache(vi, tmp_image_ds_loc) [ 761.214869] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 761.214869] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] vm_util.copy_virtual_disk( [ 761.214869] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 761.214869] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] session._wait_for_task(vmdk_copy_task) [ 761.214869] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 761.214869] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] return self.wait_for_task(task_ref) [ 761.214869] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 761.214869] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] return evt.wait() [ 761.214869] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 761.214869] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] result = hub.switch() [ 761.214869] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 761.214869] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] return self.greenlet.switch() [ 761.215240] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 761.215240] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] self.f(*self.args, **self.kw) [ 761.215240] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 761.215240] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] raise exceptions.translate_fault(task_info.error) [ 761.215240] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 761.215240] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] Faults: ['InvalidArgument'] [ 761.215240] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] [ 761.215240] env[61649]: INFO nova.compute.manager [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] Terminating instance [ 761.216453] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.217832] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 761.217832] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fcdd0e5d-49f2-40db-b83c-4c8374e070d7 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.219083] env[61649]: DEBUG nova.compute.manager [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 761.219304] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 761.220040] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca7720ee-de53-49ef-b9fc-bd0717431af7 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.227168] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] Unregistering the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 761.227400] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5ac04926-9f83-48f1-b524-1839f383dce8 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.229801] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 761.229969] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61649) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 761.230908] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-769dd23c-27e5-4d7f-a954-7e1671108bcd {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.235280] env[61649]: DEBUG oslo_vmware.api [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Waiting for the task: (returnval){ [ 761.235280] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]520da348-c5bc-d5ab-3a21-6444876e2f9a" [ 761.235280] env[61649]: _type = "Task" [ 761.235280] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.251374] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] [instance: 29281253-e489-48f5-b219-75ae984adb00] Preparing fetch location {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 761.251374] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Creating directory with path [datastore1] vmware_temp/adfe1fa3-1afb-4b5d-8d05-a868b244bbec/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 761.251374] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-441fc738-3a0a-46df-ac01-9eedcca7513c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.271558] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Created directory with path [datastore1] vmware_temp/adfe1fa3-1afb-4b5d-8d05-a868b244bbec/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 761.271800] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] [instance: 29281253-e489-48f5-b219-75ae984adb00] Fetch image to [datastore1] vmware_temp/adfe1fa3-1afb-4b5d-8d05-a868b244bbec/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 761.271975] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] [instance: 29281253-e489-48f5-b219-75ae984adb00] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to [datastore1] vmware_temp/adfe1fa3-1afb-4b5d-8d05-a868b244bbec/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 761.272771] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94a91fa2-4962-4b5d-a8ad-7df350618a1d {{(pid=61649) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.279822] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-819a844b-8a06-4943-90d5-e0f3757ce778 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.290965] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0a3d79b-d9ec-4162-8a96-4b848d9df115 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.295712] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] Unregistered the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 761.296091] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] Deleting contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 761.296385] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Deleting the datastore file [datastore1] 2c9b8ba9-193e-468f-bc4e-006ab413b374 {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 761.297064] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e47b4d3b-bdd8-43ea-8e1d-5f04510d4ba9 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.326140] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1d5d59e-e39a-4466-add3-4b9620058947 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.332252] env[61649]: DEBUG oslo_vmware.api [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Waiting for the task: (returnval){ [ 761.332252] env[61649]: value = "task-158131" [ 761.332252] env[61649]: _type = "Task" [ 761.332252] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.337501] env[61649]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-8e5be5e7-7ae4-416c-971d-688fa00d55ac {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.341789] env[61649]: DEBUG oslo_vmware.api [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Task: {'id': task-158131, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.360640] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] [instance: 29281253-e489-48f5-b219-75ae984adb00] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 761.420237] env[61649]: DEBUG oslo_vmware.rw_handles [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/adfe1fa3-1afb-4b5d-8d05-a868b244bbec/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 761.481063] env[61649]: DEBUG oslo_vmware.rw_handles [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Completed reading data from the image iterator. {{(pid=61649) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 761.481269] env[61649]: DEBUG oslo_vmware.rw_handles [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/adfe1fa3-1afb-4b5d-8d05-a868b244bbec/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 761.844410] env[61649]: DEBUG oslo_vmware.api [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Task: {'id': task-158131, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.069375} completed successfully. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.844678] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Deleted the datastore file {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 761.844835] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] Deleted contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 761.845004] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 761.845177] env[61649]: INFO nova.compute.manager [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] Took 0.63 seconds to destroy the instance on the hypervisor. [ 761.847171] env[61649]: DEBUG nova.compute.claims [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] Aborting claim: {{(pid=61649) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 761.847341] env[61649]: DEBUG oslo_concurrency.lockutils [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 761.847549] env[61649]: DEBUG oslo_concurrency.lockutils [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 761.849446] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Expecting reply to msg 826abd0886f5480cbbfebd9f65ae5f48 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 761.882120] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 826abd0886f5480cbbfebd9f65ae5f48 [ 761.948039] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 761.948698] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 
10df0aa6ed56498ca3e29e329c35853f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 761.967534] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 10df0aa6ed56498ca3e29e329c35853f [ 762.305992] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9114db26-881a-4d03-ac0a-f5a7148a51b2 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.314593] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d51c08c-10cb-44ae-aaf0-d06c51eb4125 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.343674] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eec65681-947c-41c5-b10f-435dcd39167d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.351454] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e8ba0bc-8c9d-46e2-8985-1e07d165f884 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.364255] env[61649]: DEBUG nova.compute.provider_tree [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 762.364766] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Expecting reply to msg 2395e72842644672993485de282761f7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 762.372069] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2395e72842644672993485de282761f7 [ 762.373001] env[61649]: DEBUG nova.scheduler.client.report [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 762.375437] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Expecting reply to msg c45211b3609c4b2e93726dc985210519 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 762.385634] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c45211b3609c4b2e93726dc985210519 [ 762.386269] env[61649]: DEBUG oslo_concurrency.lockutils [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Lock 
"compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.539s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 762.386753] env[61649]: ERROR nova.compute.manager [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 762.386753] env[61649]: Faults: ['InvalidArgument'] [ 762.386753] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] Traceback (most recent call last): [ 762.386753] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 762.386753] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] self.driver.spawn(context, instance, image_meta, [ 762.386753] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 762.386753] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] self._vmops.spawn(context, instance, image_meta, injected_files, [ 762.386753] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 762.386753] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] self._fetch_image_if_missing(context, vi) [ 762.386753] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 762.386753] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] image_cache(vi, tmp_image_ds_loc) [ 762.386753] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 762.387411] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] vm_util.copy_virtual_disk( [ 762.387411] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 762.387411] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] session._wait_for_task(vmdk_copy_task) [ 762.387411] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 762.387411] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] return self.wait_for_task(task_ref) [ 762.387411] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 762.387411] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] return evt.wait() [ 762.387411] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 762.387411] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] result = hub.switch() [ 762.387411] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 762.387411] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] return self.greenlet.switch() [ 762.387411] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 762.387411] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] self.f(*self.args, **self.kw) [ 762.388314] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 762.388314] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] raise exceptions.translate_fault(task_info.error) [ 762.388314] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 762.388314] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] Faults: ['InvalidArgument'] [ 762.388314] env[61649]: ERROR nova.compute.manager [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] [ 762.388314] env[61649]: DEBUG nova.compute.utils [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] VimFaultException {{(pid=61649) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 762.389196] env[61649]: DEBUG nova.compute.manager [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] Build of instance 2c9b8ba9-193e-468f-bc4e-006ab413b374 was re-scheduled: A specified parameter was not correct: fileType [ 762.389196] env[61649]: Faults: ['InvalidArgument'] {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 762.389553] env[61649]: DEBUG nova.compute.manager [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] Unplugging VIFs for instance {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 762.389737] env[61649]: DEBUG nova.compute.manager [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 762.389908] env[61649]: DEBUG nova.compute.manager [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 762.390157] env[61649]: DEBUG nova.network.neutron [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 762.729164] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Expecting reply to msg b174b0cbbb264a28845b2bba6ad518b6 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 762.741176] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b174b0cbbb264a28845b2bba6ad518b6 [ 762.741350] env[61649]: DEBUG nova.network.neutron [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 762.741817] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Expecting reply to msg 89ad6d6f089549619494c5e2c8766f27 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 762.755194] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 89ad6d6f089549619494c5e2c8766f27 [ 762.755909] env[61649]: INFO nova.compute.manager [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] [instance: 2c9b8ba9-193e-468f-bc4e-006ab413b374] Took 0.37 seconds to deallocate network for instance. 
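The failed build above follows the standard oslo.vmware task-polling path that the traceback records: nova's vm_util.copy_virtual_disk starts a CopyVirtualDisk task, session._wait_for_task hands it to oslo_vmware.api.wait_for_task, and a looping call polls the task until it leaves the running state; an 'error' state is translated into the VimFaultException that aborts the claim and triggers the reschedule seen in the surrounding entries. The sketch below is a minimal re-implementation of that loop for illustration only: it uses a plain blocking sleep instead of the eventlet loopingcall in the traceback, and the get_task_info callable and fault-dict fields are assumptions, not oslo.vmware's real signatures.

    import time

    class VimFaultException(Exception):
        # Simplified stand-in for oslo_vmware.exceptions.VimFaultException:
        # carries the vCenter fault names (e.g. ['InvalidArgument']) plus
        # the localized message ("A specified parameter was not correct...").
        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list

    def translate_fault(error):
        # Equivalent in spirit to oslo_vmware.exceptions.translate_fault:
        # turn the task's error payload into a raisable exception.
        return VimFaultException(list(error.get('faults', [])),
                                 error.get('localizedMessage', 'unknown fault'))

    def wait_for_task(get_task_info, interval=0.5):
        # Poll until the task succeeds, raising on the 'error' state --
        # the `raise exceptions.translate_fault(task_info.error)` at
        # oslo_vmware/api.py line 448 in the traceback above.
        while True:
            info = get_task_info()  # hypothetical accessor returning a dict
            if info['state'] == 'success':
                return info.get('result')
            if info['state'] == 'error':
                raise translate_fault(info['error'])
            time.sleep(interval)

Once the exception propagates, the claim abort, "Unplugging VIFs"/"Deallocating network" cleanup, and allocation deletion in the adjacent entries are the compute manager's generic _do_build_and_run_instance reschedule path rather than anything VMware-specific.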
[ 762.757725] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Expecting reply to msg 1a6f7727e2e94764896f18f3cbce6066 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 762.794883] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1a6f7727e2e94764896f18f3cbce6066 [ 762.794883] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Expecting reply to msg 73e145f7f39748618deb6be9271df5df in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 762.828333] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 73e145f7f39748618deb6be9271df5df [ 762.850828] env[61649]: INFO nova.scheduler.client.report [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Deleted allocations for instance 2c9b8ba9-193e-468f-bc4e-006ab413b374 [ 762.858067] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Expecting reply to msg b392076c09e54db4865a70db49a42549 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 762.869796] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b392076c09e54db4865a70db49a42549 [ 762.870515] env[61649]: DEBUG oslo_concurrency.lockutils [None req-667efbd1-c2cb-47fd-a987-3d003b6dc82f tempest-ImagesOneServerTestJSON-893723329 tempest-ImagesOneServerTestJSON-893723329-project-member] Lock "2c9b8ba9-193e-468f-bc4e-006ab413b374" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 192.839s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 762.871201] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Expecting reply to msg 85a725b8a7b84abf90fea1daef786b4a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 762.888468] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 85a725b8a7b84abf90fea1daef786b4a [ 762.888884] env[61649]: DEBUG nova.compute.manager [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Starting instance... 
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 762.890749] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Expecting reply to msg d57471ac18a74735beb068a61d052c10 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 762.923649] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d57471ac18a74735beb068a61d052c10 [ 762.929381] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 762.929862] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 21ea3830f3174f758115be8a8faa26de in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 762.940031] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 21ea3830f3174f758115be8a8faa26de [ 762.940944] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 762.941494] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 762.941494] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 762.941610] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61649) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 762.942653] env[61649]: DEBUG oslo_concurrency.lockutils [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 762.942901] env[61649]: DEBUG oslo_concurrency.lockutils [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 762.944728] env[61649]: INFO nova.compute.claims [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 
tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 762.946355] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Expecting reply to msg 0c18f01688a3411b9c785c26b1409f8c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 762.948476] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc859d4e-b406-4659-a4a3-c241340efa1d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.957822] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70300c88-2dac-4cdb-a84e-1f30ce6b1c4a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.976824] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1015d01-0895-4d38-ab07-edfba005a337 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.979632] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0c18f01688a3411b9c785c26b1409f8c [ 762.981129] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Expecting reply to msg db6fd2c3526544e0bb8ec899635cfc73 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 762.986725] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04db9c57-6406-42d2-9997-4aa5c6d65804 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.990993] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg db6fd2c3526544e0bb8ec899635cfc73 [ 763.020447] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181812MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61649) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 763.020606] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 763.451658] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b623554-867d-4952-8b45-db206ebff38c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.459517] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df43fcb9-b2c6-4ec3-9bd0-8712a34b08ee {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.490617] env[61649]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-591f5ba9-2490-4660-bb6c-f1fa907f2c62 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.498456] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5263a872-75e5-4e94-98fa-7da8c20125c9 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.511745] env[61649]: DEBUG nova.compute.provider_tree [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 763.512251] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Expecting reply to msg 59f0811cda0e4a7ca4af0e4e60cd049b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 763.519973] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 59f0811cda0e4a7ca4af0e4e60cd049b [ 763.520897] env[61649]: DEBUG nova.scheduler.client.report [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 763.523546] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Expecting reply to msg 05e683c3467f453cb0d5d285f1bf9888 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 763.534505] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 05e683c3467f453cb0d5d285f1bf9888 [ 763.535212] env[61649]: DEBUG oslo_concurrency.lockutils [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.592s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 763.535672] env[61649]: DEBUG nova.compute.manager [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Start building networks asynchronously for instance. 
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 763.538120] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Expecting reply to msg 0339e7fac2e44bc2a947298259bf7797 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 763.544460] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.518s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 763.545281] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 782385c0dc084040bb0dd8250a7add2d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 763.568307] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0339e7fac2e44bc2a947298259bf7797 [ 763.569621] env[61649]: DEBUG nova.compute.utils [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Using /dev/sd instead of None {{(pid=61649) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 763.570256] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Expecting reply to msg 244bb0f0a3e84f66809efac1c1975657 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 763.571040] env[61649]: DEBUG nova.compute.manager [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Allocating IP information in the background. {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 763.571208] env[61649]: DEBUG nova.network.neutron [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] [instance: 95426048-d403-4dad-9ad7-b76de655a319] allocate_for_instance() {{(pid=61649) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 763.577061] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 782385c0dc084040bb0dd8250a7add2d [ 763.581068] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg e8e914a1a97646ddb07804a11c49b8d2 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 763.582156] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 244bb0f0a3e84f66809efac1c1975657 [ 763.582649] env[61649]: DEBUG nova.compute.manager [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Start building block device mappings for instance. 
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 763.584568] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Expecting reply to msg c094cc29c67b4420b940af488ee86680 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 763.591581] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e8e914a1a97646ddb07804a11c49b8d2 [ 763.619189] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 29281253-e489-48f5-b219-75ae984adb00 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 763.619367] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance aacbebf5-bd31-465b-b574-6c4a98b27f30 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 763.619507] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance eb0c04e3-1234-445c-bfa6-e031dd0b89d3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 763.619667] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance fd0ac9db-adc2-46f2-93ff-0b7e299534a7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 763.619795] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 00931111-13a1-447d-a401-943221badd59 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 763.619908] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 99f9912a-edf0-40f5-a7ce-55767081705b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 763.620034] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 7f9f2074-6822-4d9d-9791-4bebc7e55862 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 763.620159] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance bf8c692f-6510-4548-aedd-0e1792512e20 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 763.620293] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance e5fe92cf-e150-419f-a164-a98a9d24dd8c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 763.620381] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 95426048-d403-4dad-9ad7-b76de655a319 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 763.620978] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg d9e7bdedfe58417098c527843a89b7e3 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 763.627797] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c094cc29c67b4420b940af488ee86680 [ 763.630619] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Expecting reply to msg c3618aa6fbbf4ad4839855be499ae256 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 763.631678] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d9e7bdedfe58417098c527843a89b7e3 [ 763.632342] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance adc73db6-8bff-4007-ae74-528a37840d96 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 763.632786] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 29207462f5bc4007bcac7d8f11ede580 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 763.641104] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 29207462f5bc4007bcac7d8f11ede580 [ 763.641674] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 788fc9bb-2f88-4f82-88cf-9c7a002edb47 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 763.642116] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg e48fd8b9d5024674bf02ffdd1ae6a33d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 763.659890] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e48fd8b9d5024674bf02ffdd1ae6a33d [ 763.660659] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 62d0a02d-88af-48f2-a14a-c9f2e899babe has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 763.661118] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 947a48eb4d9b43a4ab6bf7bad8939fa4 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 763.668976] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c3618aa6fbbf4ad4839855be499ae256 [ 763.670220] env[61649]: DEBUG nova.compute.manager [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Start spawning the instance on the hypervisor. {{(pid=61649) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 763.683226] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 947a48eb4d9b43a4ab6bf7bad8939fa4 [ 763.683226] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance c7fac9e7-0802-4f2e-a577-4ee50efa835a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 763.683226] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 1f438037a0fa42eeb93d8cff95cc25f7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 763.696158] env[61649]: DEBUG nova.virt.hardware [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-04-02T10:03:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-04-02T10:03:42Z,direct_url=,disk_format='vmdk',id=d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d9c1bd4c77004c3cb8e42232cad1896c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-04-02T10:03:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 763.696452] env[61649]: DEBUG nova.virt.hardware [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Flavor limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 763.696614] env[61649]: DEBUG nova.virt.hardware [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Image limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 763.696797] env[61649]: DEBUG nova.virt.hardware [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Flavor pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 763.696945] env[61649]: DEBUG nova.virt.hardware [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Image pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 763.697206] env[61649]: DEBUG nova.virt.hardware [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 763.697455] env[61649]: DEBUG nova.virt.hardware [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 763.697666] env[61649]: DEBUG 
nova.virt.hardware [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 763.697850] env[61649]: DEBUG nova.virt.hardware [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Got 1 possible topologies {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 763.698016] env[61649]: DEBUG nova.virt.hardware [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 763.698189] env[61649]: DEBUG nova.virt.hardware [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 763.699044] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1f438037a0fa42eeb93d8cff95cc25f7 [ 763.699937] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90eff799-10d9-4c50-9d71-8c1efee1a9f2 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.710379] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance b9315087-b61c-488c-aaa9-5f4b4e2f12b4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 763.710379] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 84c978bbd3f845e986f31b6dada2436f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 763.712189] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-930eb76b-aaa7-4ea0-abc6-fc04c033bb0f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.716819] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 84c978bbd3f845e986f31b6dada2436f [ 763.717604] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 3889663a-53e8-4d3e-bed6-5e86519522ef has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 763.718106] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg b1c548b96ae247749c33d4d6b12fd2ab in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 763.729008] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b1c548b96ae247749c33d4d6b12fd2ab [ 763.729889] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 833dbc2a-a434-4ca1-aa33-b48a910c0e91 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 763.730372] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg d8658a8ff9334a359083196be4922df4 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 763.738467] env[61649]: DEBUG nova.policy [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9f75696f03de42d4a907d815dfaec43f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd0b5bbd9dc6240abb9b778ae8cc529d3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61649) authorize /opt/stack/nova/nova/policy.py:203}} [ 763.741335] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d8658a8ff9334a359083196be4922df4 [ 763.741963] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 8819ddfb-4286-455b-8216-05e89424183e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 763.742419] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg d2e21efce0c9440f84d05805ddd296d0 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 763.751239] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d2e21efce0c9440f84d05805ddd296d0 [ 763.752057] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance fb407af6-66cb-4b3d-b630-d2b5a4b2c8d8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 763.752299] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 7c0c02f6c8f14178a04f4a6f165ca449 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 763.760962] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7c0c02f6c8f14178a04f4a6f165ca449 [ 763.761611] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance fc905ad2-7f1d-4356-a8f7-1eda98cdd01d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 763.762047] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg cd6d54cbc8244b1a9313740d7341084b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 763.770924] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cd6d54cbc8244b1a9313740d7341084b [ 763.771595] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 4868f1fe-04d3-4055-bf61-8a46723cf573 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 763.772047] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 4c78ede30ddf42ccb05ff31174e98416 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 763.784427] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4c78ede30ddf42ccb05ff31174e98416 [ 763.785143] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance fffdaf35-4e17-40ba-95a7-cf34fa04737e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 763.785588] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg bfb2ac9039e2438e98d8934fda58ea6d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 763.794453] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bfb2ac9039e2438e98d8934fda58ea6d [ 763.795102] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance bf8a0b66-22ef-4f1e-99a3-9727d4a61c02 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 763.795540] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg b69812494921406585330b799a097b80 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 763.805790] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b69812494921406585330b799a097b80 [ 763.805790] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 6126223b-c712-4260-a49b-7a56c4035e75 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 763.806228] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg e278530b4ca34f43b016cbf5e17e9de0 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 763.818628] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e278530b4ca34f43b016cbf5e17e9de0 [ 763.819291] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance a0db1e96-4ca4-4fed-b86b-d8457f3570a9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 763.819741] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg fb158a2f78fa40df81261783c8cc5a73 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 763.829008] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fb158a2f78fa40df81261783c8cc5a73 [ 763.829738] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 9142a98b-6400-4cd2-b21f-29a435f95503 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 763.830264] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg cd32b65aaf54402396a99beef617f9c0 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 763.846456] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cd32b65aaf54402396a99beef617f9c0 [ 763.847154] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 8999b9ee-ae7e-4438-80b7-dffdb3e92630 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 763.847562] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 63dc54648c384892a13d924471ec57bc in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 763.862794] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 63dc54648c384892a13d924471ec57bc [ 763.862794] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 018ab9c2-8c6d-4836-9e26-70ffc33b9b30 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 763.862794] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg e422ea4afaec42d4b8b217fac11a986d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 763.867758] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e422ea4afaec42d4b8b217fac11a986d [ 763.868586] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 93bf61a5-0737-4495-854d-14f1feebab86 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 763.869009] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg c2328f0696a54d58a003dcf25501df7e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 763.883467] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c2328f0696a54d58a003dcf25501df7e [ 763.884223] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance f814cbd2-8d20-4a26-9bae-000a70a3e082 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 763.884813] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 8d7e36446719450ca08a9f997866d367 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 763.893969] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8d7e36446719450ca08a9f997866d367 [ 763.895007] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 0fc6dad2-0cde-46db-b840-3bd2737a91af has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 763.895469] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 31a4ee58d1314f8ab36af6be07d1dfe0 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 763.918171] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 31a4ee58d1314f8ab36af6be07d1dfe0 [ 763.918882] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance f09ee30f-7a9b-4c2e-a0b6-da2a711b9b4e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 763.919542] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg b478fc8861694b3faca824a663747d37 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 763.929051] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b478fc8861694b3faca824a663747d37 [ 763.929994] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 28428441-219c-4627-857e-ab8b91390c68 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 763.930536] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg ceaf71c7cdcc4a0c9cb8c75ab04d5cd1 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 763.942354] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ceaf71c7cdcc4a0c9cb8c75ab04d5cd1 [ 763.943241] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 5e9ab69f-856e-4b8d-808a-0799b87a9cc6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 763.943803] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 276c1fc67a2c46bbb13ee2dafb10c541 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 763.952641] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 276c1fc67a2c46bbb13ee2dafb10c541 [ 763.953260] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance b176b001-3c32-439a-b6cd-9b608a0ac623 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 763.954927] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 5c2bcddf11774017b7340667341388ba in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 763.962726] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5c2bcddf11774017b7340667341388ba [ 763.963394] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 95dad1e2-74d2-478f-8095-23a26770e27f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 763.963845] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg eee9ccac2b2647e1bf95bc8e4e3d4194 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 763.977608] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eee9ccac2b2647e1bf95bc8e4e3d4194 [ 763.978302] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 29f84900-0805-4ab2-af4d-bd7be2ac94d3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 763.978534] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 763.978677] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 764.457030] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51fd7032-3b5c-4014-ae79-a9f4a9a6b598 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.464737] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be5cb273-6f86-46eb-b6f0-235cfd2fb572 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.495356] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44821b81-6df4-49ad-b9e1-abb5fcdf81ed {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.504939] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84778aa6-5516-487d-ad9b-7ab0caefc8f7 {{(pid=61649) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.524652] env[61649]: DEBUG nova.compute.provider_tree [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 764.524652] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 1c0784e35db5409db223b76dfb5c36f2 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 764.526908] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1c0784e35db5409db223b76dfb5c36f2 [ 764.527785] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 764.530100] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg b764f1cb51dd49c6b6f7f9aed49390ff in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 764.542486] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b764f1cb51dd49c6b6f7f9aed49390ff [ 764.543216] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61649) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 764.543393] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.004s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 764.620981] env[61649]: DEBUG nova.network.neutron [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Successfully created port: 34feb753-f797-4686-9983-19a66245aad6 {{(pid=61649) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 765.543471] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 765.543767] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 765.543860] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Starting heal instance 
info cache {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 765.543985] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Rebuilding the list of instances to heal {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 765.544615] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg aaa2d86e814447fbaf9f53d716707d35 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 765.564173] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aaa2d86e814447fbaf9f53d716707d35 [ 765.566194] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 29281253-e489-48f5-b219-75ae984adb00] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 765.566328] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 765.566456] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 765.566587] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 765.566710] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 00931111-13a1-447d-a401-943221badd59] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 765.566830] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 765.567003] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 765.567172] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 765.567303] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Skipping network cache update for instance because it is Building. 
{{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 765.567462] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 765.567537] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Didn't find any instances for network info cache update. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 765.568048] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 765.568312] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 765.568504] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 765.568592] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 765.568923] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 765.568923] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 765.568988] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61649) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 765.577303] env[61649]: DEBUG nova.network.neutron [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Successfully updated port: 34feb753-f797-4686-9983-19a66245aad6 {{(pid=61649) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 765.578293] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Expecting reply to msg 8ae5125b961e4799b458694112e0b736 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 765.593178] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8ae5125b961e4799b458694112e0b736 [ 765.593981] env[61649]: DEBUG oslo_concurrency.lockutils [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Acquiring lock "refresh_cache-95426048-d403-4dad-9ad7-b76de655a319" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 765.594106] env[61649]: DEBUG oslo_concurrency.lockutils [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Acquired lock "refresh_cache-95426048-d403-4dad-9ad7-b76de655a319" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 765.594265] env[61649]: DEBUG nova.network.neutron [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Building network info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 765.594657] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Expecting reply to msg a63fec45ee244b138397a0fc820939f9 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 765.606769] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a63fec45ee244b138397a0fc820939f9 [ 765.672824] env[61649]: DEBUG nova.network.neutron [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Instance cache missing network info. 
{{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 765.837561] env[61649]: DEBUG nova.compute.manager [req-65c36e88-3f52-4161-adf4-13c09cefb3b7 req-3b159548-b3f3-412d-bf60-628117d475be service nova] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Received event network-vif-plugged-34feb753-f797-4686-9983-19a66245aad6 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 765.837786] env[61649]: DEBUG oslo_concurrency.lockutils [req-65c36e88-3f52-4161-adf4-13c09cefb3b7 req-3b159548-b3f3-412d-bf60-628117d475be service nova] Acquiring lock "95426048-d403-4dad-9ad7-b76de655a319-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 765.837990] env[61649]: DEBUG oslo_concurrency.lockutils [req-65c36e88-3f52-4161-adf4-13c09cefb3b7 req-3b159548-b3f3-412d-bf60-628117d475be service nova] Lock "95426048-d403-4dad-9ad7-b76de655a319-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 765.838156] env[61649]: DEBUG oslo_concurrency.lockutils [req-65c36e88-3f52-4161-adf4-13c09cefb3b7 req-3b159548-b3f3-412d-bf60-628117d475be service nova] Lock "95426048-d403-4dad-9ad7-b76de655a319-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 765.838325] env[61649]: DEBUG nova.compute.manager [req-65c36e88-3f52-4161-adf4-13c09cefb3b7 req-3b159548-b3f3-412d-bf60-628117d475be service nova] [instance: 95426048-d403-4dad-9ad7-b76de655a319] No waiting events found dispatching network-vif-plugged-34feb753-f797-4686-9983-19a66245aad6 {{(pid=61649) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 765.838489] env[61649]: WARNING nova.compute.manager [req-65c36e88-3f52-4161-adf4-13c09cefb3b7 req-3b159548-b3f3-412d-bf60-628117d475be service nova] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Received unexpected event network-vif-plugged-34feb753-f797-4686-9983-19a66245aad6 for instance with vm_state building and task_state spawning. 
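Note on the WARNING above: Neutron emits network-vif-plugged as soon as port 34feb753 goes active, but at this point nova-compute has not yet registered a waiter for that event for instance 95426048 (the VM is still being built), so pop_instance_event finds nothing pending ("No waiting events found") and the handler logs the event as unexpected. With vm_state building and task_state spawning this is benign. A minimal sketch of the waiter pattern implied by the "Acquiring lock ...-events" / "No waiting events found" lines follows; the class and method names here are illustrative only, not Nova's actual API:

    import threading

    class EventWaiters:
        """Toy version of the per-instance event table implied by the log."""
        def __init__(self):
            self._lock = threading.Lock()   # cf. the "...-events" lock above
            self._waiters = {}              # (instance_uuid, event) -> threading.Event

        def prepare(self, instance_uuid, event):
            # Register interest BEFORE triggering the action that emits the event.
            waiter = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event)] = waiter
            return waiter

        def pop_event(self, instance_uuid, event):
            # Called when an external event (e.g. network-vif-plugged) arrives.
            with self._lock:
                waiter = self._waiters.pop((instance_uuid, event), None)
            if waiter is None:
                return False   # nobody waiting -> "Received unexpected event"
            waiter.set()
            return True

If prepare() has not run by the time the event lands, pop_event() returns False and the event is discarded with exactly the warning seen above.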
[ 765.851852] env[61649]: DEBUG nova.network.neutron [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Updating instance_info_cache with network_info: [{"id": "34feb753-f797-4686-9983-19a66245aad6", "address": "fa:16:3e:ae:fa:6c", "network": {"id": "3e8c70e9-f7a0-4699-8f58-e4a4c850e78a", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-17951275-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "d0b5bbd9dc6240abb9b778ae8cc529d3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34feb753-f7", "ovs_interfaceid": "34feb753-f797-4686-9983-19a66245aad6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 765.852520] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Expecting reply to msg 4cf59fa95cc74377b1fd836fb37a2403 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 765.865045] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4cf59fa95cc74377b1fd836fb37a2403 [ 765.865722] env[61649]: DEBUG oslo_concurrency.lockutils [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Releasing lock "refresh_cache-95426048-d403-4dad-9ad7-b76de655a319" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 765.865877] env[61649]: DEBUG nova.compute.manager [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Instance network_info: |[{"id": "34feb753-f797-4686-9983-19a66245aad6", "address": "fa:16:3e:ae:fa:6c", "network": {"id": "3e8c70e9-f7a0-4699-8f58-e4a4c850e78a", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-17951275-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "d0b5bbd9dc6240abb9b778ae8cc529d3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", 
"segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34feb753-f7", "ovs_interfaceid": "34feb753-f797-4686-9983-19a66245aad6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 765.867061] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ae:fa:6c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ddfb706a-add1-4e16-9ac4-d20b16a1df6d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '34feb753-f797-4686-9983-19a66245aad6', 'vif_model': 'vmxnet3'}] {{(pid=61649) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 765.874041] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Creating folder: Project (d0b5bbd9dc6240abb9b778ae8cc529d3). Parent ref: group-v51588. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 765.874713] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e03086a7-227d-483d-ac1a-37b38893ee2f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.885773] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Created folder: Project (d0b5bbd9dc6240abb9b778ae8cc529d3) in parent group-v51588. [ 765.885958] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Creating folder: Instances. Parent ref: group-v51626. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 765.886194] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-05b2a0c6-7276-4b62-8235-7c43fd19bfbd {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.894450] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Created folder: Instances in parent group-v51626. [ 765.894665] env[61649]: DEBUG oslo.service.loopingcall [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 765.894839] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Creating VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 765.895023] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6c012213-0181-476f-94ef-811cf71a387e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.913727] env[61649]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 765.913727] env[61649]: value = "task-158134" [ 765.913727] env[61649]: _type = "Task" [ 765.913727] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.921595] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158134, 'name': CreateVM_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.423404] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158134, 'name': CreateVM_Task} progress is 25%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.924464] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158134, 'name': CreateVM_Task} progress is 25%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.425079] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158134, 'name': CreateVM_Task} progress is 25%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.929160] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158134, 'name': CreateVM_Task} progress is 25%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.161795] env[61649]: DEBUG nova.compute.manager [req-99bc810b-ec99-4eea-bdb5-ab3de0b7c0d0 req-9e5c09a0-2a6b-4ed4-ad86-f6a7f360619b service nova] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Received event network-changed-34feb753-f797-4686-9983-19a66245aad6 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 768.161982] env[61649]: DEBUG nova.compute.manager [req-99bc810b-ec99-4eea-bdb5-ab3de0b7c0d0 req-9e5c09a0-2a6b-4ed4-ad86-f6a7f360619b service nova] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Refreshing instance network info cache due to event network-changed-34feb753-f797-4686-9983-19a66245aad6. 
{{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 768.162195] env[61649]: DEBUG oslo_concurrency.lockutils [req-99bc810b-ec99-4eea-bdb5-ab3de0b7c0d0 req-9e5c09a0-2a6b-4ed4-ad86-f6a7f360619b service nova] Acquiring lock "refresh_cache-95426048-d403-4dad-9ad7-b76de655a319" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 768.162332] env[61649]: DEBUG oslo_concurrency.lockutils [req-99bc810b-ec99-4eea-bdb5-ab3de0b7c0d0 req-9e5c09a0-2a6b-4ed4-ad86-f6a7f360619b service nova] Acquired lock "refresh_cache-95426048-d403-4dad-9ad7-b76de655a319" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 768.162487] env[61649]: DEBUG nova.network.neutron [req-99bc810b-ec99-4eea-bdb5-ab3de0b7c0d0 req-9e5c09a0-2a6b-4ed4-ad86-f6a7f360619b service nova] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Refreshing network info cache for port 34feb753-f797-4686-9983-19a66245aad6 {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 768.162989] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-99bc810b-ec99-4eea-bdb5-ab3de0b7c0d0 req-9e5c09a0-2a6b-4ed4-ad86-f6a7f360619b service nova] Expecting reply to msg 05980f5b7b1a4525b7c71cdc8fc3749c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 768.170757] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 05980f5b7b1a4525b7c71cdc8fc3749c [ 768.430326] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158134, 'name': CreateVM_Task} progress is 99%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.721473] env[61649]: DEBUG nova.network.neutron [req-99bc810b-ec99-4eea-bdb5-ab3de0b7c0d0 req-9e5c09a0-2a6b-4ed4-ad86-f6a7f360619b service nova] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Updated VIF entry in instance network info cache for port 34feb753-f797-4686-9983-19a66245aad6. 
{{(pid=61649) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 768.721829] env[61649]: DEBUG nova.network.neutron [req-99bc810b-ec99-4eea-bdb5-ab3de0b7c0d0 req-9e5c09a0-2a6b-4ed4-ad86-f6a7f360619b service nova] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Updating instance_info_cache with network_info: [{"id": "34feb753-f797-4686-9983-19a66245aad6", "address": "fa:16:3e:ae:fa:6c", "network": {"id": "3e8c70e9-f7a0-4699-8f58-e4a4c850e78a", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-17951275-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "d0b5bbd9dc6240abb9b778ae8cc529d3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34feb753-f7", "ovs_interfaceid": "34feb753-f797-4686-9983-19a66245aad6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 768.722349] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-99bc810b-ec99-4eea-bdb5-ab3de0b7c0d0 req-9e5c09a0-2a6b-4ed4-ad86-f6a7f360619b service nova] Expecting reply to msg ba0b0bbdb09c45a5835556878e23609f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 768.730905] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ba0b0bbdb09c45a5835556878e23609f [ 768.731536] env[61649]: DEBUG oslo_concurrency.lockutils [req-99bc810b-ec99-4eea-bdb5-ab3de0b7c0d0 req-9e5c09a0-2a6b-4ed4-ad86-f6a7f360619b service nova] Releasing lock "refresh_cache-95426048-d403-4dad-9ad7-b76de655a319" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 768.930132] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158134, 'name': CreateVM_Task, 'duration_secs': 2.909034} completed successfully. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.930448] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Created VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 768.931091] env[61649]: DEBUG oslo_concurrency.lockutils [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 768.931328] env[61649]: DEBUG oslo_concurrency.lockutils [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 768.932077] env[61649]: DEBUG oslo_concurrency.lockutils [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 768.932457] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-05be3a76-1bd0-4e6a-83ad-6f2621069d25 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.938066] env[61649]: DEBUG oslo_vmware.api [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Waiting for the task: (returnval){ [ 768.938066] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52b37c30-af82-7822-6693-aa479c52b5ef" [ 768.938066] env[61649]: _type = "Task" [ 768.938066] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.946874] env[61649]: DEBUG oslo_vmware.api [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52b37c30-af82-7822-6693-aa479c52b5ef, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.448368] env[61649]: DEBUG oslo_concurrency.lockutils [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 769.448510] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Processing image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 769.448717] env[61649]: DEBUG oslo_concurrency.lockutils [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 770.468779] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-a1565ac7-a1fc-4f0e-b546-a57bc1e33efb tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Expecting reply to msg 299c0f2be3184f6f9cd9550ac72ac3c8 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 770.477119] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 299c0f2be3184f6f9cd9550ac72ac3c8 [ 770.477577] env[61649]: DEBUG oslo_concurrency.lockutils [None req-a1565ac7-a1fc-4f0e-b546-a57bc1e33efb tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Acquiring lock "aacbebf5-bd31-465b-b574-6c4a98b27f30" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 770.587046] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-a85fabab-cbcf-4b7e-ad67-0cf75e67bf72 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Expecting reply to msg 6ca7895f74024a478c7c536d7bda1911 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 770.596463] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6ca7895f74024a478c7c536d7bda1911 [ 770.596463] env[61649]: DEBUG oslo_concurrency.lockutils [None req-a85fabab-cbcf-4b7e-ad67-0cf75e67bf72 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Acquiring lock "29281253-e489-48f5-b219-75ae984adb00" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 770.984369] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 087b28080aeb4625ba653b8fe383b497 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 770.999985] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 087b28080aeb4625ba653b8fe383b497 [ 772.610761] env[61649]: INFO 
oslo_messaging._drivers.amqpdriver [None req-5ae5c19b-3a9a-41b1-af66-e4c59ae16f42 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Expecting reply to msg fd6dbade2f544cafa7d7185e6832a6c0 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 772.619565] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fd6dbade2f544cafa7d7185e6832a6c0 [ 772.620078] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5ae5c19b-3a9a-41b1-af66-e4c59ae16f42 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Acquiring lock "eb0c04e3-1234-445c-bfa6-e031dd0b89d3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 777.648576] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-32434135-d37d-4fd9-8393-5505a3cb03b9 tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Expecting reply to msg e70e000189264a2db68297c96f526247 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 777.660050] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e70e000189264a2db68297c96f526247 [ 777.660552] env[61649]: DEBUG oslo_concurrency.lockutils [None req-32434135-d37d-4fd9-8393-5505a3cb03b9 tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Acquiring lock "fd0ac9db-adc2-46f2-93ff-0b7e299534a7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 778.413575] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Acquiring lock "bf2399eb-b2df-43b3-bddd-48692825c40a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 778.413848] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Lock "bf2399eb-b2df-43b3-bddd-48692825c40a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 779.522508] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-60084869-ef27-423e-811f-22216929276b tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Expecting reply to msg 699d817053da448c878e8509d37dadea in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 779.531341] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 699d817053da448c878e8509d37dadea [ 779.531801] env[61649]: DEBUG oslo_concurrency.lockutils [None req-60084869-ef27-423e-811f-22216929276b tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Acquiring lock "00931111-13a1-447d-a401-943221badd59" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 781.372414] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d31b6b3b-a78c-450d-8c88-df36c6e81767 tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Expecting reply to msg cbae1463a6b3455fba3e8744e6b2dbd5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 781.380693] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cbae1463a6b3455fba3e8744e6b2dbd5 [ 781.381153] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d31b6b3b-a78c-450d-8c88-df36c6e81767 tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Acquiring lock "7f9f2074-6822-4d9d-9791-4bebc7e55862" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 783.766020] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-77ddaa6d-7a39-4fb5-8274-0f0e29156f39 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Expecting reply to msg 3bdcba1feb614db59651c02094bde0e4 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 783.779173] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3bdcba1feb614db59651c02094bde0e4 [ 783.779705] env[61649]: DEBUG oslo_concurrency.lockutils [None req-77ddaa6d-7a39-4fb5-8274-0f0e29156f39 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Acquiring lock "bf8c692f-6510-4548-aedd-0e1792512e20" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 785.805732] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-0191f4c5-2b2e-4b25-984b-159db45541a6 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Expecting reply to msg fbe8101e84d84daab1d6c90e13762777 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 785.814703] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fbe8101e84d84daab1d6c90e13762777 [ 785.815145] env[61649]: DEBUG oslo_concurrency.lockutils [None req-0191f4c5-2b2e-4b25-984b-159db45541a6 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Acquiring lock "95426048-d403-4dad-9ad7-b76de655a319" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 786.004021] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7887162d-7c3e-4eff-bcac-fbb705555f89 tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Expecting reply to msg d5b8f3ce7bb44134a51413162512b603 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 786.013080] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d5b8f3ce7bb44134a51413162512b603 [ 786.013529] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7887162d-7c3e-4eff-bcac-fbb705555f89 tempest-ServersAdminNegativeTestJSON-411079484 
tempest-ServersAdminNegativeTestJSON-411079484-project-member] Acquiring lock "e5fe92cf-e150-419f-a164-a98a9d24dd8c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 789.772657] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Acquiring lock "6ab197e9-3e38-4b37-b625-c30b6977261a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 789.772942] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Lock "6ab197e9-3e38-4b37-b625-c30b6977261a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 804.989420] env[61649]: DEBUG oslo_concurrency.lockutils [None req-91b516ed-70dd-4ac8-97aa-b8094a21b093 tempest-SecurityGroupsTestJSON-1426961127 tempest-SecurityGroupsTestJSON-1426961127-project-member] Acquiring lock "d7a1c8c3-1694-4704-8414-098af751c05e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 804.989718] env[61649]: DEBUG oslo_concurrency.lockutils [None req-91b516ed-70dd-4ac8-97aa-b8094a21b093 tempest-SecurityGroupsTestJSON-1426961127 tempest-SecurityGroupsTestJSON-1426961127-project-member] Lock "d7a1c8c3-1694-4704-8414-098af751c05e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 806.886909] env[61649]: DEBUG oslo_concurrency.lockutils [None req-1e76531d-3cec-4608-9940-aba46c3e78c4 tempest-MultipleCreateTestJSON-674370190 tempest-MultipleCreateTestJSON-674370190-project-member] Acquiring lock "e4d8cb96-182d-4b77-a8ac-dfd1bf52d484" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 806.886909] env[61649]: DEBUG oslo_concurrency.lockutils [None req-1e76531d-3cec-4608-9940-aba46c3e78c4 tempest-MultipleCreateTestJSON-674370190 tempest-MultipleCreateTestJSON-674370190-project-member] Lock "e4d8cb96-182d-4b77-a8ac-dfd1bf52d484" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 806.911200] env[61649]: DEBUG oslo_concurrency.lockutils [None req-1e76531d-3cec-4608-9940-aba46c3e78c4 tempest-MultipleCreateTestJSON-674370190 tempest-MultipleCreateTestJSON-674370190-project-member] Acquiring lock "10da1abe-1c95-44b8-a10d-ce618625b69b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 806.911200] env[61649]: DEBUG oslo_concurrency.lockutils [None req-1e76531d-3cec-4608-9940-aba46c3e78c4 tempest-MultipleCreateTestJSON-674370190 tempest-MultipleCreateTestJSON-674370190-project-member] Lock "10da1abe-1c95-44b8-a10d-ce618625b69b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 808.879336] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3b9a8e8a-77e1-4b1e-ac66-89ff4c5432d2 tempest-ServersTestMultiNic-1885917485 tempest-ServersTestMultiNic-1885917485-project-member] Acquiring lock "a086f03c-c993-4e1a-8a3e-efa40bb8b8bd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 808.879648] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3b9a8e8a-77e1-4b1e-ac66-89ff4c5432d2 tempest-ServersTestMultiNic-1885917485 tempest-ServersTestMultiNic-1885917485-project-member] Lock "a086f03c-c993-4e1a-8a3e-efa40bb8b8bd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 810.552351] env[61649]: WARNING oslo_vmware.rw_handles [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 810.552351] env[61649]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 810.552351] env[61649]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 810.552351] env[61649]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 810.552351] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 810.552351] env[61649]: ERROR oslo_vmware.rw_handles response.begin() [ 810.552351] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 810.552351] env[61649]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 810.552351] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 810.552351] env[61649]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 810.552351] env[61649]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 810.552351] env[61649]: ERROR oslo_vmware.rw_handles [ 810.552964] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] [instance: 29281253-e489-48f5-b219-75ae984adb00] Downloaded image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to vmware_temp/adfe1fa3-1afb-4b5d-8d05-a868b244bbec/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 810.554687] env[61649]: 
DEBUG nova.virt.vmwareapi.vmops [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] [instance: 29281253-e489-48f5-b219-75ae984adb00] Caching image {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 810.554919] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Copying Virtual Disk [datastore1] vmware_temp/adfe1fa3-1afb-4b5d-8d05-a868b244bbec/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk to [datastore1] vmware_temp/adfe1fa3-1afb-4b5d-8d05-a868b244bbec/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk {{(pid=61649) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 810.555213] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2c374741-32ae-47f7-aef3-ec8107b52fba {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.563860] env[61649]: DEBUG oslo_vmware.api [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Waiting for the task: (returnval){ [ 810.563860] env[61649]: value = "task-158135" [ 810.563860] env[61649]: _type = "Task" [ 810.563860] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.572110] env[61649]: DEBUG oslo_vmware.api [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Task: {'id': task-158135, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.073376] env[61649]: DEBUG oslo_vmware.exceptions [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Fault InvalidArgument not matched. 
{{(pid=61649) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 811.073672] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 811.074247] env[61649]: ERROR nova.compute.manager [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] [instance: 29281253-e489-48f5-b219-75ae984adb00] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 811.074247] env[61649]: Faults: ['InvalidArgument'] [ 811.074247] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] Traceback (most recent call last): [ 811.074247] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 811.074247] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] yield resources [ 811.074247] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 811.074247] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] self.driver.spawn(context, instance, image_meta, [ 811.074247] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 811.074247] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] self._vmops.spawn(context, instance, image_meta, injected_files, [ 811.074247] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 811.074247] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] self._fetch_image_if_missing(context, vi) [ 811.074247] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 811.074677] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] image_cache(vi, tmp_image_ds_loc) [ 811.074677] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 811.074677] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] vm_util.copy_virtual_disk( [ 811.074677] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 811.074677] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] session._wait_for_task(vmdk_copy_task) [ 811.074677] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 811.074677] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] return self.wait_for_task(task_ref) [ 811.074677] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 811.074677] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] return evt.wait() [ 811.074677] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 811.074677] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] result = hub.switch() [ 811.074677] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 811.074677] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] return self.greenlet.switch() [ 811.075033] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 811.075033] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] self.f(*self.args, **self.kw) [ 811.075033] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 811.075033] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] raise exceptions.translate_fault(task_info.error) [ 811.075033] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 811.075033] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] Faults: ['InvalidArgument'] [ 811.075033] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] [ 811.075033] env[61649]: INFO nova.compute.manager [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] [instance: 29281253-e489-48f5-b219-75ae984adb00] Terminating instance [ 811.076197] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 811.076452] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 811.077001] env[61649]: DEBUG nova.compute.manager [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] [instance: 
29281253-e489-48f5-b219-75ae984adb00] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 811.077202] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] [instance: 29281253-e489-48f5-b219-75ae984adb00] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 811.077430] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-61e37364-bc9d-4246-8009-8ad0a7b1a7f7 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.079797] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b91e4812-fec2-4704-863a-e7c5e681756e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.086635] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] [instance: 29281253-e489-48f5-b219-75ae984adb00] Unregistering the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 811.086823] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-94444d7b-490b-4bda-afc7-69594a7f9a44 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.089051] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 811.089252] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61649) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 811.090206] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6271dfd-f2fe-43fc-9b50-fe2b44775862 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.095999] env[61649]: DEBUG oslo_vmware.api [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Waiting for the task: (returnval){ [ 811.095999] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52769bed-9af2-4f00-e480-cbf74292fdf3" [ 811.095999] env[61649]: _type = "Task" [ 811.095999] env[61649]: } to complete. 
{{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.103404] env[61649]: DEBUG oslo_vmware.api [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52769bed-9af2-4f00-e480-cbf74292fdf3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.152034] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] [instance: 29281253-e489-48f5-b219-75ae984adb00] Unregistered the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 811.152269] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] [instance: 29281253-e489-48f5-b219-75ae984adb00] Deleting contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 811.152470] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Deleting the datastore file [datastore1] 29281253-e489-48f5-b219-75ae984adb00 {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 811.152769] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f4d5e9fc-dcf8-4a93-b298-9ece94927714 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.158850] env[61649]: DEBUG oslo_vmware.api [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Waiting for the task: (returnval){ [ 811.158850] env[61649]: value = "task-158137" [ 811.158850] env[61649]: _type = "Task" [ 811.158850] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.166616] env[61649]: DEBUG oslo_vmware.api [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Task: {'id': task-158137, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.606289] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Preparing fetch location {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 811.606585] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Creating directory with path [datastore1] vmware_temp/34076d73-8a09-4613-ad5a-ce2f9f689e17/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 811.606775] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a1cdf70e-956a-43b1-acd5-515c261b16ec {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.618560] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Created directory with path [datastore1] vmware_temp/34076d73-8a09-4613-ad5a-ce2f9f689e17/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 811.621168] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Fetch image to [datastore1] vmware_temp/34076d73-8a09-4613-ad5a-ce2f9f689e17/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 811.621168] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to [datastore1] vmware_temp/34076d73-8a09-4613-ad5a-ce2f9f689e17/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 811.621168] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19c5f10e-ef0d-4ebd-a215-4a258f0076b5 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.626402] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45a2d630-f2d7-47b7-b552-82d09aa700ce {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.636669] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09dc70e8-0e5a-4d8c-8162-3b11b3cb8f4f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.680122] env[61649]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bd33cd0-761c-473c-b04b-98dfad834f61 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.691677] env[61649]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-7585d16c-7ff7-4abf-a37c-4b90d2917c19 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.693710] env[61649]: DEBUG oslo_vmware.api [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Task: {'id': task-158137, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078649} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.694027] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Deleted the datastore file {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 811.694248] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] [instance: 29281253-e489-48f5-b219-75ae984adb00] Deleted contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 811.694465] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] [instance: 29281253-e489-48f5-b219-75ae984adb00] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 811.694682] env[61649]: INFO nova.compute.manager [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] [instance: 29281253-e489-48f5-b219-75ae984adb00] Took 0.62 seconds to destroy the instance on the hypervisor. 
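
The entries above trace oslo.vmware's task pattern end to end: VirtualDiskManager.CopyVirtualDisk_Task is invoked, wait_for_task polls the returned task ("Task: {...} progress is 0%"), and when the task finishes in error the fault is translated and re-raised, which is why spawn fails with VimFaultException and Faults: ['InvalidArgument']. A minimal sketch of that polling loop follows, simplified to a synchronous wait (the real oslo_vmware/api.py drives this through an eventlet looping call, and poll_task_info below is a hypothetical stand-in for the vCenter task-info read):

    import time

    class VimFaultException(Exception):
        """Simplified stand-in for oslo_vmware.exceptions.VimFaultException."""
        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list

    def wait_for_task(poll_task_info, task_id, interval=0.5):
        # poll_task_info(task_id) is assumed to return a dict such as
        # {'state': 'running', 'progress': 0} or
        # {'state': 'error', 'fault': 'InvalidArgument', 'message': '...'}
        while True:
            info = poll_task_info(task_id)
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                # mirrors _poll_task raising exceptions.translate_fault(task_info.error)
                raise VimFaultException([info['fault']], info['message'])
            time.sleep(interval)  # the real code re-arms a looping call instead of sleeping
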
[ 811.697197] env[61649]: DEBUG nova.compute.claims [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] [instance: 29281253-e489-48f5-b219-75ae984adb00] Aborting claim: {{(pid=61649) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 811.697419] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 811.697673] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 811.700606] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Expecting reply to msg 86dbbcf85c064f8b814a112a834c089a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 811.723102] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 811.739481] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 86dbbcf85c064f8b814a112a834c089a [ 811.794682] env[61649]: DEBUG oslo_vmware.rw_handles [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/34076d73-8a09-4613-ad5a-ce2f9f689e17/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 811.856671] env[61649]: DEBUG oslo_vmware.rw_handles [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Completed reading data from the image iterator. {{(pid=61649) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 811.856912] env[61649]: DEBUG oslo_vmware.rw_handles [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/34076d73-8a09-4613-ad5a-ce2f9f689e17/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61649) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 812.238637] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-004eabae-6b86-4c0c-96de-32098bd6f0d2 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.246296] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c6ac0fe-0d5e-45de-990e-fac4b8aaf827 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.278769] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d0e99c6-8a65-4063-96b0-31015a0ba32f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.287206] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2e55acf-9963-4d58-a17f-40f57771f8ff {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.300924] env[61649]: DEBUG nova.compute.provider_tree [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 812.301457] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Expecting reply to msg 9c7e2510b7ca465ca2a9e5056b44442e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 812.309315] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9c7e2510b7ca465ca2a9e5056b44442e [ 812.310249] env[61649]: DEBUG nova.scheduler.client.report [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 812.312541] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Expecting reply to msg a17f5c10ea1c44e184c0d9824af64949 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 812.328896] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a17f5c10ea1c44e184c0d9824af64949 [ 812.329546] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.632s {{(pid=61649) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 812.330061] env[61649]: ERROR nova.compute.manager [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] [instance: 29281253-e489-48f5-b219-75ae984adb00] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 812.330061] env[61649]: Faults: ['InvalidArgument'] [ 812.330061] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] Traceback (most recent call last): [ 812.330061] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 812.330061] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] self.driver.spawn(context, instance, image_meta, [ 812.330061] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 812.330061] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] self._vmops.spawn(context, instance, image_meta, injected_files, [ 812.330061] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 812.330061] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] self._fetch_image_if_missing(context, vi) [ 812.330061] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 812.330061] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] image_cache(vi, tmp_image_ds_loc) [ 812.330061] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 812.330450] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] vm_util.copy_virtual_disk( [ 812.330450] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 812.330450] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] session._wait_for_task(vmdk_copy_task) [ 812.330450] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 812.330450] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] return self.wait_for_task(task_ref) [ 812.330450] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 812.330450] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] return evt.wait() [ 812.330450] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 812.330450] env[61649]: ERROR nova.compute.manager [instance: 
29281253-e489-48f5-b219-75ae984adb00] result = hub.switch() [ 812.330450] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 812.330450] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] return self.greenlet.switch() [ 812.330450] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 812.330450] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] self.f(*self.args, **self.kw) [ 812.330825] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 812.330825] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] raise exceptions.translate_fault(task_info.error) [ 812.330825] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 812.330825] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] Faults: ['InvalidArgument'] [ 812.330825] env[61649]: ERROR nova.compute.manager [instance: 29281253-e489-48f5-b219-75ae984adb00] [ 812.330825] env[61649]: DEBUG nova.compute.utils [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] [instance: 29281253-e489-48f5-b219-75ae984adb00] VimFaultException {{(pid=61649) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 812.332972] env[61649]: DEBUG nova.compute.manager [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] [instance: 29281253-e489-48f5-b219-75ae984adb00] Build of instance 29281253-e489-48f5-b219-75ae984adb00 was re-scheduled: A specified parameter was not correct: fileType [ 812.332972] env[61649]: Faults: ['InvalidArgument'] {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 812.333165] env[61649]: DEBUG nova.compute.manager [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] [instance: 29281253-e489-48f5-b219-75ae984adb00] Unplugging VIFs for instance {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 812.333261] env[61649]: DEBUG nova.compute.manager [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 812.333421] env[61649]: DEBUG nova.compute.manager [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] [instance: 29281253-e489-48f5-b219-75ae984adb00] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 812.333584] env[61649]: DEBUG nova.network.neutron [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] [instance: 29281253-e489-48f5-b219-75ae984adb00] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 812.797720] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Expecting reply to msg d9b9f0e7198b4b68b58c917fb0e6f89b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 812.805611] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d9b9f0e7198b4b68b58c917fb0e6f89b [ 812.806213] env[61649]: DEBUG nova.network.neutron [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] [instance: 29281253-e489-48f5-b219-75ae984adb00] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 812.806693] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Expecting reply to msg 7c57b5fb1c5f44509861f86b88b979fc in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 812.823848] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7c57b5fb1c5f44509861f86b88b979fc [ 812.824240] env[61649]: INFO nova.compute.manager [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] [instance: 29281253-e489-48f5-b219-75ae984adb00] Took 0.49 seconds to deallocate network for instance. 
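
Everything from "Aborting claim" through "Took 0.49 seconds to deallocate network" is Nova's standard teardown for a failed build: the spawn error aborts the resource-tracker claim under the "compute_resources" lock, the build is marked for reschedule, and the instance's network allocations are released. A hedged sketch of that control flow, with illustrative names rather than Nova's actual signatures:

    class RescheduledException(Exception):
        """Signals that the build should be retried on another host."""

    def build_and_run_instance(driver, network_api, claim, instance):
        try:
            driver.spawn(instance)        # fails here with the VimFaultException above
        except Exception as exc:
            claim.abort()                 # "Aborting claim" under lock "compute_resources"
            network_api.deallocate_for_instance(instance)  # "Deallocating network for instance"
            raise RescheduledException(
                "Build of instance %s was re-scheduled: %s" % (instance['uuid'], exc))
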
[ 812.825972] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Expecting reply to msg 4b139418df48493fa4daf988832d4059 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 812.890248] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4b139418df48493fa4daf988832d4059 [ 812.892991] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Expecting reply to msg 709d4b63bfd149adac2ad4a23d3aba13 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 812.930454] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 709d4b63bfd149adac2ad4a23d3aba13 [ 812.951377] env[61649]: INFO nova.scheduler.client.report [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Deleted allocations for instance 29281253-e489-48f5-b219-75ae984adb00 [ 812.964704] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Expecting reply to msg c5e3605c6cb947bca85aa7afebaea85d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 812.978546] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c5e3605c6cb947bca85aa7afebaea85d [ 812.979183] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3cb9e85c-e93c-41bf-a35c-46c8b78eefb0 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Lock "29281253-e489-48f5-b219-75ae984adb00" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 243.450s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 812.979739] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-aea03b43-01a8-4ff8-b0b4-77d5fd111c29 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Expecting reply to msg f5c7e387c4af4c66889fa3ee974bf2be in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 812.980510] env[61649]: DEBUG oslo_concurrency.lockutils [None req-a85fabab-cbcf-4b7e-ad67-0cf75e67bf72 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Lock "29281253-e489-48f5-b219-75ae984adb00" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 42.384s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 812.980719] env[61649]: DEBUG oslo_concurrency.lockutils [None req-a85fabab-cbcf-4b7e-ad67-0cf75e67bf72 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Acquiring lock "29281253-e489-48f5-b219-75ae984adb00-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 812.980926] env[61649]: DEBUG oslo_concurrency.lockutils [None req-a85fabab-cbcf-4b7e-ad67-0cf75e67bf72 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Lock 
"29281253-e489-48f5-b219-75ae984adb00-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 812.981085] env[61649]: DEBUG oslo_concurrency.lockutils [None req-a85fabab-cbcf-4b7e-ad67-0cf75e67bf72 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Lock "29281253-e489-48f5-b219-75ae984adb00-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 812.983531] env[61649]: INFO nova.compute.manager [None req-a85fabab-cbcf-4b7e-ad67-0cf75e67bf72 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] [instance: 29281253-e489-48f5-b219-75ae984adb00] Terminating instance [ 812.985900] env[61649]: DEBUG nova.compute.manager [None req-a85fabab-cbcf-4b7e-ad67-0cf75e67bf72 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] [instance: 29281253-e489-48f5-b219-75ae984adb00] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 812.986081] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-a85fabab-cbcf-4b7e-ad67-0cf75e67bf72 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] [instance: 29281253-e489-48f5-b219-75ae984adb00] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 812.986331] env[61649]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b527ec79-acd5-4434-84fb-31b1c7d5ea58 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.995376] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f5c7e387c4af4c66889fa3ee974bf2be [ 812.995978] env[61649]: DEBUG nova.compute.manager [None req-aea03b43-01a8-4ff8-b0b4-77d5fd111c29 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] [instance: adc73db6-8bff-4007-ae74-528a37840d96] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 812.997643] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-aea03b43-01a8-4ff8-b0b4-77d5fd111c29 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Expecting reply to msg 295f638c9373483aa03b5ff14d8d8ee1 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 813.002152] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11fee379-f044-4ea9-8a0f-10263d7c1500 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.032472] env[61649]: WARNING nova.virt.vmwareapi.vmops [None req-a85fabab-cbcf-4b7e-ad67-0cf75e67bf72 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] [instance: 29281253-e489-48f5-b219-75ae984adb00] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 29281253-e489-48f5-b219-75ae984adb00 could not be found. 
[ 813.032659] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-a85fabab-cbcf-4b7e-ad67-0cf75e67bf72 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] [instance: 29281253-e489-48f5-b219-75ae984adb00] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 813.032840] env[61649]: INFO nova.compute.manager [None req-a85fabab-cbcf-4b7e-ad67-0cf75e67bf72 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] [instance: 29281253-e489-48f5-b219-75ae984adb00] Took 0.05 seconds to destroy the instance on the hypervisor. [ 813.033085] env[61649]: DEBUG oslo.service.loopingcall [None req-a85fabab-cbcf-4b7e-ad67-0cf75e67bf72 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 813.033304] env[61649]: DEBUG nova.compute.manager [-] [instance: 29281253-e489-48f5-b219-75ae984adb00] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 813.033404] env[61649]: DEBUG nova.network.neutron [-] [instance: 29281253-e489-48f5-b219-75ae984adb00] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 813.047655] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 295f638c9373483aa03b5ff14d8d8ee1 [ 813.048381] env[61649]: DEBUG nova.compute.manager [None req-aea03b43-01a8-4ff8-b0b4-77d5fd111c29 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] [instance: adc73db6-8bff-4007-ae74-528a37840d96] Instance disappeared before build. 
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 813.049006] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-aea03b43-01a8-4ff8-b0b4-77d5fd111c29 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Expecting reply to msg cdf6ab8b9c01469292005b31d77deb2b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 813.059565] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cdf6ab8b9c01469292005b31d77deb2b [ 813.066270] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg f2b474c131874f14906f3c095597aeff in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 813.071129] env[61649]: DEBUG oslo_concurrency.lockutils [None req-aea03b43-01a8-4ff8-b0b4-77d5fd111c29 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Lock "adc73db6-8bff-4007-ae74-528a37840d96" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 221.773s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 813.071669] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-09ed1a11-02fe-4825-b7cb-16cbc292b66b tempest-ServerActionsTestOtherA-266297113 tempest-ServerActionsTestOtherA-266297113-project-member] Expecting reply to msg 22bc03d6a0124903ac7cd17873f5f26f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 813.072987] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f2b474c131874f14906f3c095597aeff [ 813.072987] env[61649]: DEBUG nova.network.neutron [-] [instance: 29281253-e489-48f5-b219-75ae984adb00] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 813.073344] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg d9da52fff70f44dfaef6ed47b1fc02ad in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 813.079443] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 22bc03d6a0124903ac7cd17873f5f26f [ 813.079842] env[61649]: DEBUG nova.compute.manager [None req-09ed1a11-02fe-4825-b7cb-16cbc292b66b tempest-ServerActionsTestOtherA-266297113 tempest-ServerActionsTestOtherA-266297113-project-member] [instance: 788fc9bb-2f88-4f82-88cf-9c7a002edb47] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 813.081625] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-09ed1a11-02fe-4825-b7cb-16cbc292b66b tempest-ServerActionsTestOtherA-266297113 tempest-ServerActionsTestOtherA-266297113-project-member] Expecting reply to msg 01cc7ff1869a4ca0a32a38bf36f4c909 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 813.085994] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d9da52fff70f44dfaef6ed47b1fc02ad [ 813.086432] env[61649]: INFO nova.compute.manager [-] [instance: 29281253-e489-48f5-b219-75ae984adb00] Took 0.05 seconds to deallocate network for instance. 
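
The "Instance disappeared before build" entries here, and the run of them that follows, come from a guard at the top of the build path: each of these requests waited three-plus minutes for its instance lock (the "held 2xx.xxxs" figures), and by the time it ran, the tempest test had already deleted the instance, so the manager exits before touching the hypervisor. A sketch of that guard, assuming a hypothetical get_instance accessor that returns None for deleted rows:

    def do_build_and_run_instance(db, driver, instance_uuid):
        # Re-check the instance after the long lock wait; building a
        # deleted instance would leak a VM on the hypervisor.
        instance = db.get_instance(instance_uuid)
        if instance is None:
            return "aborted"   # logged as "Instance disappeared before build."
        driver.spawn(instance)
        return "built"
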
[ 813.090365] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-a85fabab-cbcf-4b7e-ad67-0cf75e67bf72 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Expecting reply to msg 916c196c1d0c44f89d6bf57fb7d9bd8b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 813.109437] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 01cc7ff1869a4ca0a32a38bf36f4c909 [ 813.110112] env[61649]: DEBUG nova.compute.manager [None req-09ed1a11-02fe-4825-b7cb-16cbc292b66b tempest-ServerActionsTestOtherA-266297113 tempest-ServerActionsTestOtherA-266297113-project-member] [instance: 788fc9bb-2f88-4f82-88cf-9c7a002edb47] Instance disappeared before build. {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 813.110480] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-09ed1a11-02fe-4825-b7cb-16cbc292b66b tempest-ServerActionsTestOtherA-266297113 tempest-ServerActionsTestOtherA-266297113-project-member] Expecting reply to msg 71b5b0985beb4d498fdb637f0e212c9c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 813.129686] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 71b5b0985beb4d498fdb637f0e212c9c [ 813.132656] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 916c196c1d0c44f89d6bf57fb7d9bd8b [ 813.140690] env[61649]: DEBUG oslo_concurrency.lockutils [None req-09ed1a11-02fe-4825-b7cb-16cbc292b66b tempest-ServerActionsTestOtherA-266297113 tempest-ServerActionsTestOtherA-266297113-project-member] Lock "788fc9bb-2f88-4f82-88cf-9c7a002edb47" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 218.548s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 813.141230] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-60fa1d55-6048-4177-ab1c-ae2aedaeacc7 tempest-VolumesAssistedSnapshotsTest-1738453556 tempest-VolumesAssistedSnapshotsTest-1738453556-project-member] Expecting reply to msg d49c6940d5d2444a994e15bb0ebbe74b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 813.146191] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-a85fabab-cbcf-4b7e-ad67-0cf75e67bf72 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Expecting reply to msg 12dd2ed9dfaf40d1a733162443bce1df in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 813.149584] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d49c6940d5d2444a994e15bb0ebbe74b [ 813.150005] env[61649]: DEBUG nova.compute.manager [None req-60fa1d55-6048-4177-ab1c-ae2aedaeacc7 tempest-VolumesAssistedSnapshotsTest-1738453556 tempest-VolumesAssistedSnapshotsTest-1738453556-project-member] [instance: 62d0a02d-88af-48f2-a14a-c9f2e899babe] Starting instance... 
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 813.151733] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-60fa1d55-6048-4177-ab1c-ae2aedaeacc7 tempest-VolumesAssistedSnapshotsTest-1738453556 tempest-VolumesAssistedSnapshotsTest-1738453556-project-member] Expecting reply to msg a661bdd61df94255ad62466c06851747 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 813.174306] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a661bdd61df94255ad62466c06851747 [ 813.174886] env[61649]: DEBUG nova.compute.manager [None req-60fa1d55-6048-4177-ab1c-ae2aedaeacc7 tempest-VolumesAssistedSnapshotsTest-1738453556 tempest-VolumesAssistedSnapshotsTest-1738453556-project-member] [instance: 62d0a02d-88af-48f2-a14a-c9f2e899babe] Instance disappeared before build. {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 813.175582] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-60fa1d55-6048-4177-ab1c-ae2aedaeacc7 tempest-VolumesAssistedSnapshotsTest-1738453556 tempest-VolumesAssistedSnapshotsTest-1738453556-project-member] Expecting reply to msg e194ca35df284f709010519ef0995f78 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 813.207967] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e194ca35df284f709010519ef0995f78 [ 813.219383] env[61649]: DEBUG oslo_concurrency.lockutils [None req-60fa1d55-6048-4177-ab1c-ae2aedaeacc7 tempest-VolumesAssistedSnapshotsTest-1738453556 tempest-VolumesAssistedSnapshotsTest-1738453556-project-member] Lock "62d0a02d-88af-48f2-a14a-c9f2e899babe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 216.940s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 813.220074] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-b159c52b-1dd5-4d85-808e-44e3a408759e tempest-SecurityGroupsTestJSON-1426961127 tempest-SecurityGroupsTestJSON-1426961127-project-member] Expecting reply to msg 6a646ce5192d4b6aa441d761cf0ff58f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 813.226671] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 12dd2ed9dfaf40d1a733162443bce1df [ 813.230393] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6a646ce5192d4b6aa441d761cf0ff58f [ 813.231174] env[61649]: DEBUG nova.compute.manager [None req-b159c52b-1dd5-4d85-808e-44e3a408759e tempest-SecurityGroupsTestJSON-1426961127 tempest-SecurityGroupsTestJSON-1426961127-project-member] [instance: c7fac9e7-0802-4f2e-a577-4ee50efa835a] Starting instance... 
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 813.232790] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-b159c52b-1dd5-4d85-808e-44e3a408759e tempest-SecurityGroupsTestJSON-1426961127 tempest-SecurityGroupsTestJSON-1426961127-project-member] Expecting reply to msg a010efd13ebc4f47a481e77c8dc9df10 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 813.233751] env[61649]: DEBUG oslo_concurrency.lockutils [None req-a85fabab-cbcf-4b7e-ad67-0cf75e67bf72 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Lock "29281253-e489-48f5-b219-75ae984adb00" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.253s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 813.234041] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-a85fabab-cbcf-4b7e-ad67-0cf75e67bf72 tempest-ServerDiagnosticsTest-975555243 tempest-ServerDiagnosticsTest-975555243-project-member] Expecting reply to msg 3d53a069450b444d8fd62768c410a32b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 813.250130] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3d53a069450b444d8fd62768c410a32b [ 813.262112] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a010efd13ebc4f47a481e77c8dc9df10 [ 813.262663] env[61649]: DEBUG nova.compute.manager [None req-b159c52b-1dd5-4d85-808e-44e3a408759e tempest-SecurityGroupsTestJSON-1426961127 tempest-SecurityGroupsTestJSON-1426961127-project-member] [instance: c7fac9e7-0802-4f2e-a577-4ee50efa835a] Instance disappeared before build. {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 813.263004] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-b159c52b-1dd5-4d85-808e-44e3a408759e tempest-SecurityGroupsTestJSON-1426961127 tempest-SecurityGroupsTestJSON-1426961127-project-member] Expecting reply to msg 8e47d93ba9834a8c875fa5b0e35344b1 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 813.277783] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8e47d93ba9834a8c875fa5b0e35344b1 [ 813.289254] env[61649]: DEBUG oslo_concurrency.lockutils [None req-b159c52b-1dd5-4d85-808e-44e3a408759e tempest-SecurityGroupsTestJSON-1426961127 tempest-SecurityGroupsTestJSON-1426961127-project-member] Lock "c7fac9e7-0802-4f2e-a577-4ee50efa835a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 215.515s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 813.294317] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-006cb8b0-9585-43e4-b3a3-d67269f24d31 tempest-AttachInterfacesV270Test-288835143 tempest-AttachInterfacesV270Test-288835143-project-member] Expecting reply to msg ed948ab147d040be921a769073b70c6f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 813.316719] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ed948ab147d040be921a769073b70c6f [ 813.316719] env[61649]: DEBUG nova.compute.manager [None req-006cb8b0-9585-43e4-b3a3-d67269f24d31 tempest-AttachInterfacesV270Test-288835143 tempest-AttachInterfacesV270Test-288835143-project-member] [instance: b9315087-b61c-488c-aaa9-5f4b4e2f12b4] Starting instance... 
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 813.316719] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-006cb8b0-9585-43e4-b3a3-d67269f24d31 tempest-AttachInterfacesV270Test-288835143 tempest-AttachInterfacesV270Test-288835143-project-member] Expecting reply to msg c16f13f90b464dcc9500c86215d2e131 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 813.341930] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c16f13f90b464dcc9500c86215d2e131 [ 813.342553] env[61649]: DEBUG nova.compute.manager [None req-006cb8b0-9585-43e4-b3a3-d67269f24d31 tempest-AttachInterfacesV270Test-288835143 tempest-AttachInterfacesV270Test-288835143-project-member] [instance: b9315087-b61c-488c-aaa9-5f4b4e2f12b4] Instance disappeared before build. {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 813.342928] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-006cb8b0-9585-43e4-b3a3-d67269f24d31 tempest-AttachInterfacesV270Test-288835143 tempest-AttachInterfacesV270Test-288835143-project-member] Expecting reply to msg 07aa2c4ed5664a518443a2a891b12f07 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 813.354251] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 07aa2c4ed5664a518443a2a891b12f07 [ 813.373847] env[61649]: DEBUG oslo_concurrency.lockutils [None req-006cb8b0-9585-43e4-b3a3-d67269f24d31 tempest-AttachInterfacesV270Test-288835143 tempest-AttachInterfacesV270Test-288835143-project-member] Lock "b9315087-b61c-488c-aaa9-5f4b4e2f12b4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 211.837s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 813.374421] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-de234925-3cc8-45f6-baae-1684c0bdf5b6 tempest-MultipleCreateTestJSON-674370190 tempest-MultipleCreateTestJSON-674370190-project-member] Expecting reply to msg 3a79a89683a94bc4ba6a133bb32a689a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 813.387484] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3a79a89683a94bc4ba6a133bb32a689a [ 813.388321] env[61649]: DEBUG nova.compute.manager [None req-de234925-3cc8-45f6-baae-1684c0bdf5b6 tempest-MultipleCreateTestJSON-674370190 tempest-MultipleCreateTestJSON-674370190-project-member] [instance: 3889663a-53e8-4d3e-bed6-5e86519522ef] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 813.390137] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-de234925-3cc8-45f6-baae-1684c0bdf5b6 tempest-MultipleCreateTestJSON-674370190 tempest-MultipleCreateTestJSON-674370190-project-member] Expecting reply to msg 92aeab66cb0f46448985dcdc09ac55e7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 813.421825] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 92aeab66cb0f46448985dcdc09ac55e7 [ 813.422427] env[61649]: DEBUG nova.compute.manager [None req-de234925-3cc8-45f6-baae-1684c0bdf5b6 tempest-MultipleCreateTestJSON-674370190 tempest-MultipleCreateTestJSON-674370190-project-member] [instance: 3889663a-53e8-4d3e-bed6-5e86519522ef] Instance disappeared before build. 
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 813.422773] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-de234925-3cc8-45f6-baae-1684c0bdf5b6 tempest-MultipleCreateTestJSON-674370190 tempest-MultipleCreateTestJSON-674370190-project-member] Expecting reply to msg 7119dfbe1fea438cab781c5cdca708d5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 813.433345] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7119dfbe1fea438cab781c5cdca708d5 [ 813.447854] env[61649]: DEBUG oslo_concurrency.lockutils [None req-de234925-3cc8-45f6-baae-1684c0bdf5b6 tempest-MultipleCreateTestJSON-674370190 tempest-MultipleCreateTestJSON-674370190-project-member] Lock "3889663a-53e8-4d3e-bed6-5e86519522ef" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 209.092s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 813.449199] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-de234925-3cc8-45f6-baae-1684c0bdf5b6 tempest-MultipleCreateTestJSON-674370190 tempest-MultipleCreateTestJSON-674370190-project-member] Expecting reply to msg 16f73ba424dc404f8f30038721ce9eae in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 813.457818] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 16f73ba424dc404f8f30038721ce9eae [ 813.458254] env[61649]: DEBUG nova.compute.manager [None req-de234925-3cc8-45f6-baae-1684c0bdf5b6 tempest-MultipleCreateTestJSON-674370190 tempest-MultipleCreateTestJSON-674370190-project-member] [instance: 833dbc2a-a434-4ca1-aa33-b48a910c0e91] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 813.465263] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-de234925-3cc8-45f6-baae-1684c0bdf5b6 tempest-MultipleCreateTestJSON-674370190 tempest-MultipleCreateTestJSON-674370190-project-member] Expecting reply to msg 397aceff1ee346eb9b58428e412a4ba6 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 813.511762] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 397aceff1ee346eb9b58428e412a4ba6 [ 813.512537] env[61649]: DEBUG nova.compute.manager [None req-de234925-3cc8-45f6-baae-1684c0bdf5b6 tempest-MultipleCreateTestJSON-674370190 tempest-MultipleCreateTestJSON-674370190-project-member] [instance: 833dbc2a-a434-4ca1-aa33-b48a910c0e91] Instance disappeared before build. 
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 813.512889] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-de234925-3cc8-45f6-baae-1684c0bdf5b6 tempest-MultipleCreateTestJSON-674370190 tempest-MultipleCreateTestJSON-674370190-project-member] Expecting reply to msg 767a659cbc1948dc9c6a99e2c323a64c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 813.524718] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 767a659cbc1948dc9c6a99e2c323a64c [ 813.536408] env[61649]: DEBUG oslo_concurrency.lockutils [None req-de234925-3cc8-45f6-baae-1684c0bdf5b6 tempest-MultipleCreateTestJSON-674370190 tempest-MultipleCreateTestJSON-674370190-project-member] Lock "833dbc2a-a434-4ca1-aa33-b48a910c0e91" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 209.148s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 813.536989] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d225dccc-3167-4f15-b2ee-e87bf6351be7 tempest-ServerRescueNegativeTestJSON-1074592202 tempest-ServerRescueNegativeTestJSON-1074592202-project-member] Expecting reply to msg 8c6ccbfc583b43d8af17acf9de34e167 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 813.546482] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8c6ccbfc583b43d8af17acf9de34e167 [ 813.547187] env[61649]: DEBUG nova.compute.manager [None req-d225dccc-3167-4f15-b2ee-e87bf6351be7 tempest-ServerRescueNegativeTestJSON-1074592202 tempest-ServerRescueNegativeTestJSON-1074592202-project-member] [instance: 8819ddfb-4286-455b-8216-05e89424183e] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 813.548864] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d225dccc-3167-4f15-b2ee-e87bf6351be7 tempest-ServerRescueNegativeTestJSON-1074592202 tempest-ServerRescueNegativeTestJSON-1074592202-project-member] Expecting reply to msg 8de7fba0f3114f6fb239444576bb4b08 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 813.582974] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8de7fba0f3114f6fb239444576bb4b08 [ 813.583884] env[61649]: DEBUG nova.compute.manager [None req-d225dccc-3167-4f15-b2ee-e87bf6351be7 tempest-ServerRescueNegativeTestJSON-1074592202 tempest-ServerRescueNegativeTestJSON-1074592202-project-member] [instance: 8819ddfb-4286-455b-8216-05e89424183e] Instance disappeared before build. 
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 813.584308] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d225dccc-3167-4f15-b2ee-e87bf6351be7 tempest-ServerRescueNegativeTestJSON-1074592202 tempest-ServerRescueNegativeTestJSON-1074592202-project-member] Expecting reply to msg dfb471a4e8c743258902e7064655a464 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 813.594843] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dfb471a4e8c743258902e7064655a464 [ 813.607313] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d225dccc-3167-4f15-b2ee-e87bf6351be7 tempest-ServerRescueNegativeTestJSON-1074592202 tempest-ServerRescueNegativeTestJSON-1074592202-project-member] Lock "8819ddfb-4286-455b-8216-05e89424183e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 208.070s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 813.608590] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-aa883485-a69e-40d7-ba40-33bfb1fcbd53 tempest-ListServerFiltersTestJSON-1824319426 tempest-ListServerFiltersTestJSON-1824319426-project-member] Expecting reply to msg 9438fe4f53884ed797a91d52c93f238d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 813.617886] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9438fe4f53884ed797a91d52c93f238d [ 813.618336] env[61649]: DEBUG nova.compute.manager [None req-aa883485-a69e-40d7-ba40-33bfb1fcbd53 tempest-ListServerFiltersTestJSON-1824319426 tempest-ListServerFiltersTestJSON-1824319426-project-member] [instance: fb407af6-66cb-4b3d-b630-d2b5a4b2c8d8] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 813.620048] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-aa883485-a69e-40d7-ba40-33bfb1fcbd53 tempest-ListServerFiltersTestJSON-1824319426 tempest-ListServerFiltersTestJSON-1824319426-project-member] Expecting reply to msg 731837080f87424abee268149539fcd5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 813.643094] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 731837080f87424abee268149539fcd5 [ 813.651623] env[61649]: DEBUG nova.compute.manager [None req-aa883485-a69e-40d7-ba40-33bfb1fcbd53 tempest-ListServerFiltersTestJSON-1824319426 tempest-ListServerFiltersTestJSON-1824319426-project-member] [instance: fb407af6-66cb-4b3d-b630-d2b5a4b2c8d8] Instance disappeared before build. 
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 813.651623] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-aa883485-a69e-40d7-ba40-33bfb1fcbd53 tempest-ListServerFiltersTestJSON-1824319426 tempest-ListServerFiltersTestJSON-1824319426-project-member] Expecting reply to msg f4548d90c7434e46bb44a0a9a75f7d4a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 813.664089] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f4548d90c7434e46bb44a0a9a75f7d4a [ 813.676227] env[61649]: DEBUG oslo_concurrency.lockutils [None req-aa883485-a69e-40d7-ba40-33bfb1fcbd53 tempest-ListServerFiltersTestJSON-1824319426 tempest-ListServerFiltersTestJSON-1824319426-project-member] Lock "fb407af6-66cb-4b3d-b630-d2b5a4b2c8d8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 207.059s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 813.676663] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c8264bde-bd22-40ec-b19d-68c990b8317f tempest-AttachInterfacesUnderV243Test-1128518305 tempest-AttachInterfacesUnderV243Test-1128518305-project-member] Expecting reply to msg a865b1ec35f6415e9ab2b323b824ba24 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 813.689478] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a865b1ec35f6415e9ab2b323b824ba24 [ 813.690232] env[61649]: DEBUG nova.compute.manager [None req-c8264bde-bd22-40ec-b19d-68c990b8317f tempest-AttachInterfacesUnderV243Test-1128518305 tempest-AttachInterfacesUnderV243Test-1128518305-project-member] [instance: fc905ad2-7f1d-4356-a8f7-1eda98cdd01d] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 813.691756] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c8264bde-bd22-40ec-b19d-68c990b8317f tempest-AttachInterfacesUnderV243Test-1128518305 tempest-AttachInterfacesUnderV243Test-1128518305-project-member] Expecting reply to msg 18087534ff984f06b80be5aec86ecaa7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 813.717664] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 18087534ff984f06b80be5aec86ecaa7 [ 813.718271] env[61649]: DEBUG nova.compute.manager [None req-c8264bde-bd22-40ec-b19d-68c990b8317f tempest-AttachInterfacesUnderV243Test-1128518305 tempest-AttachInterfacesUnderV243Test-1128518305-project-member] [instance: fc905ad2-7f1d-4356-a8f7-1eda98cdd01d] Instance disappeared before build. 
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 813.718619] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c8264bde-bd22-40ec-b19d-68c990b8317f tempest-AttachInterfacesUnderV243Test-1128518305 tempest-AttachInterfacesUnderV243Test-1128518305-project-member] Expecting reply to msg 8dfd8d8033f942e2a91f2c2873e746dd in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 813.729760] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8dfd8d8033f942e2a91f2c2873e746dd [ 813.748723] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c8264bde-bd22-40ec-b19d-68c990b8317f tempest-AttachInterfacesUnderV243Test-1128518305 tempest-AttachInterfacesUnderV243Test-1128518305-project-member] Lock "fc905ad2-7f1d-4356-a8f7-1eda98cdd01d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 204.821s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 813.749377] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-74f1ff70-60f1-43da-a311-3a5f92da5ea5 tempest-ListServerFiltersTestJSON-1824319426 tempest-ListServerFiltersTestJSON-1824319426-project-member] Expecting reply to msg 2f508583b3bf4fbc99cf86695aad806a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 813.758818] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2f508583b3bf4fbc99cf86695aad806a [ 813.759391] env[61649]: DEBUG nova.compute.manager [None req-74f1ff70-60f1-43da-a311-3a5f92da5ea5 tempest-ListServerFiltersTestJSON-1824319426 tempest-ListServerFiltersTestJSON-1824319426-project-member] [instance: 4868f1fe-04d3-4055-bf61-8a46723cf573] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 813.761237] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-74f1ff70-60f1-43da-a311-3a5f92da5ea5 tempest-ListServerFiltersTestJSON-1824319426 tempest-ListServerFiltersTestJSON-1824319426-project-member] Expecting reply to msg 4ea3ea2a2de641d3999f8839cacc8f20 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 813.787059] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4ea3ea2a2de641d3999f8839cacc8f20 [ 813.788083] env[61649]: DEBUG nova.compute.manager [None req-74f1ff70-60f1-43da-a311-3a5f92da5ea5 tempest-ListServerFiltersTestJSON-1824319426 tempest-ListServerFiltersTestJSON-1824319426-project-member] [instance: 4868f1fe-04d3-4055-bf61-8a46723cf573] Instance disappeared before build. 
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 813.788497] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-74f1ff70-60f1-43da-a311-3a5f92da5ea5 tempest-ListServerFiltersTestJSON-1824319426 tempest-ListServerFiltersTestJSON-1824319426-project-member] Expecting reply to msg fbf7502b2dc14ccdb65b51991a2528f8 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 813.800765] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fbf7502b2dc14ccdb65b51991a2528f8 [ 813.819361] env[61649]: DEBUG oslo_concurrency.lockutils [None req-74f1ff70-60f1-43da-a311-3a5f92da5ea5 tempest-ListServerFiltersTestJSON-1824319426 tempest-ListServerFiltersTestJSON-1824319426-project-member] Lock "4868f1fe-04d3-4055-bf61-8a46723cf573" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 204.430s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 813.821384] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-2e964edf-0182-4c2a-a9c0-4889d6336187 tempest-ServerRescueNegativeTestJSON-1074592202 tempest-ServerRescueNegativeTestJSON-1074592202-project-member] Expecting reply to msg 2418d2dd07774867a817604adb65ce93 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 813.837923] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2418d2dd07774867a817604adb65ce93 [ 813.838539] env[61649]: DEBUG nova.compute.manager [None req-2e964edf-0182-4c2a-a9c0-4889d6336187 tempest-ServerRescueNegativeTestJSON-1074592202 tempest-ServerRescueNegativeTestJSON-1074592202-project-member] [instance: fffdaf35-4e17-40ba-95a7-cf34fa04737e] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 813.840443] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-2e964edf-0182-4c2a-a9c0-4889d6336187 tempest-ServerRescueNegativeTestJSON-1074592202 tempest-ServerRescueNegativeTestJSON-1074592202-project-member] Expecting reply to msg 3a717f31fba94214b56aa5e8e2098e66 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 813.887490] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3a717f31fba94214b56aa5e8e2098e66 [ 813.888167] env[61649]: DEBUG nova.compute.manager [None req-2e964edf-0182-4c2a-a9c0-4889d6336187 tempest-ServerRescueNegativeTestJSON-1074592202 tempest-ServerRescueNegativeTestJSON-1074592202-project-member] [instance: fffdaf35-4e17-40ba-95a7-cf34fa04737e] Instance disappeared before build. 
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 813.888517] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-2e964edf-0182-4c2a-a9c0-4889d6336187 tempest-ServerRescueNegativeTestJSON-1074592202 tempest-ServerRescueNegativeTestJSON-1074592202-project-member] Expecting reply to msg b40ebce4da87452a836e08dc19bf8ed0 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 813.906794] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b40ebce4da87452a836e08dc19bf8ed0 [ 813.922654] env[61649]: DEBUG oslo_concurrency.lockutils [None req-2e964edf-0182-4c2a-a9c0-4889d6336187 tempest-ServerRescueNegativeTestJSON-1074592202 tempest-ServerRescueNegativeTestJSON-1074592202-project-member] Lock "fffdaf35-4e17-40ba-95a7-cf34fa04737e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 204.040s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 813.923482] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-0e74fbc9-1569-444e-8bd5-44603fbe0080 tempest-ListServerFiltersTestJSON-1824319426 tempest-ListServerFiltersTestJSON-1824319426-project-member] Expecting reply to msg d4271f1c1f6b4d8cba08245e1914c94b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 813.935835] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d4271f1c1f6b4d8cba08245e1914c94b [ 813.936403] env[61649]: DEBUG nova.compute.manager [None req-0e74fbc9-1569-444e-8bd5-44603fbe0080 tempest-ListServerFiltersTestJSON-1824319426 tempest-ListServerFiltersTestJSON-1824319426-project-member] [instance: bf8a0b66-22ef-4f1e-99a3-9727d4a61c02] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 813.938413] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-0e74fbc9-1569-444e-8bd5-44603fbe0080 tempest-ListServerFiltersTestJSON-1824319426 tempest-ListServerFiltersTestJSON-1824319426-project-member] Expecting reply to msg ae84c84b81084fe78ff624251ce500da in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 813.972452] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ae84c84b81084fe78ff624251ce500da [ 813.972452] env[61649]: DEBUG nova.compute.manager [None req-0e74fbc9-1569-444e-8bd5-44603fbe0080 tempest-ListServerFiltersTestJSON-1824319426 tempest-ListServerFiltersTestJSON-1824319426-project-member] [instance: bf8a0b66-22ef-4f1e-99a3-9727d4a61c02] Instance disappeared before build. 
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 813.972452] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-0e74fbc9-1569-444e-8bd5-44603fbe0080 tempest-ListServerFiltersTestJSON-1824319426 tempest-ListServerFiltersTestJSON-1824319426-project-member] Expecting reply to msg cded01664c23446799149ae499100d8a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 813.984216] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cded01664c23446799149ae499100d8a [ 814.002536] env[61649]: DEBUG oslo_concurrency.lockutils [None req-0e74fbc9-1569-444e-8bd5-44603fbe0080 tempest-ListServerFiltersTestJSON-1824319426 tempest-ListServerFiltersTestJSON-1824319426-project-member] Lock "bf8a0b66-22ef-4f1e-99a3-9727d4a61c02" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 202.503s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 814.003137] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9642a302-e440-4176-ba64-f571ed6432e4 tempest-InstanceActionsTestJSON-841455593 tempest-InstanceActionsTestJSON-841455593-project-member] Expecting reply to msg 7a56ba78d4004357997c09192a103720 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 814.018308] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7a56ba78d4004357997c09192a103720 [ 814.018864] env[61649]: DEBUG nova.compute.manager [None req-9642a302-e440-4176-ba64-f571ed6432e4 tempest-InstanceActionsTestJSON-841455593 tempest-InstanceActionsTestJSON-841455593-project-member] [instance: 6126223b-c712-4260-a49b-7a56c4035e75] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 814.020651] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9642a302-e440-4176-ba64-f571ed6432e4 tempest-InstanceActionsTestJSON-841455593 tempest-InstanceActionsTestJSON-841455593-project-member] Expecting reply to msg a065573bc87f45718bad7a9a77f86fa0 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 814.046501] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a065573bc87f45718bad7a9a77f86fa0 [ 814.047526] env[61649]: DEBUG nova.compute.manager [None req-9642a302-e440-4176-ba64-f571ed6432e4 tempest-InstanceActionsTestJSON-841455593 tempest-InstanceActionsTestJSON-841455593-project-member] [instance: 6126223b-c712-4260-a49b-7a56c4035e75] Instance disappeared before build. 
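[annotation] The stretch above is one pattern repeated across tempest instances: build_and_run_instance serializes on a lock named after the instance UUID, the lock is released with held times around 200 s, and each attempt exits early with "Instance disappeared before build." The shape of that serialization, as a minimal sketch using the real oslo_concurrency.lockutils context manager; the two helpers are hypothetical stand-ins, not Nova code:

    # Sketch of the per-instance build lock seen in the entries above.
    # lockutils.lock() is the real oslo.concurrency API; the helpers
    # below are hypothetical stand-ins for Nova's DB check and build.
    from oslo_concurrency import lockutils

    def instance_still_exists(instance_uuid):  # hypothetical stand-in
        return False  # mirrors the "disappeared before build" case

    def do_build(instance_uuid):  # hypothetical stand-in
        pass

    def locked_do_build_and_run_instance(instance_uuid):
        # One lock per instance UUID, matching the Lock "<uuid>"
        # acquire/release lines above.
        with lockutils.lock(instance_uuid):
            if not instance_still_exists(instance_uuid):
                return  # logged as "Instance disappeared before build."
            do_build(instance_uuid)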
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 814.047879] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9642a302-e440-4176-ba64-f571ed6432e4 tempest-InstanceActionsTestJSON-841455593 tempest-InstanceActionsTestJSON-841455593-project-member] Expecting reply to msg 1306e6a57992443a8471451c3e15bab7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 814.069083] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1306e6a57992443a8471451c3e15bab7 [ 814.080550] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9642a302-e440-4176-ba64-f571ed6432e4 tempest-InstanceActionsTestJSON-841455593 tempest-InstanceActionsTestJSON-841455593-project-member] Lock "6126223b-c712-4260-a49b-7a56c4035e75" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 201.241s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 814.081145] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Expecting reply to msg e2c9cfd7de6e4d79a30c89f9ea7b077f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 814.095580] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e2c9cfd7de6e4d79a30c89f9ea7b077f [ 814.096947] env[61649]: DEBUG nova.compute.manager [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 814.101855] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Expecting reply to msg 497a60126fe941d393e77a5b198c8a2e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 814.138915] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 497a60126fe941d393e77a5b198c8a2e [ 814.155751] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 814.156349] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 814.158242] env[61649]: INFO nova.compute.claims [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 814.160366] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f 
tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Expecting reply to msg 2847e331d06c49e69891e41596b2842b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 814.223168] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2847e331d06c49e69891e41596b2842b [ 814.223299] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Expecting reply to msg 85f77c869f5042eb9abc907d9cbea469 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 814.232390] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 85f77c869f5042eb9abc907d9cbea469 [ 814.663863] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c6d0a13-b256-4f17-9ee6-68cad98ff5e7 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.671766] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d059e138-d78f-4442-9ac5-2ce48db8da55 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.706695] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae3e6710-6213-4f40-a3db-df738e8928e6 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.717547] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-657149b1-b462-4fae-aff9-80f78481e48e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.732263] env[61649]: DEBUG nova.compute.provider_tree [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 814.732908] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Expecting reply to msg 6c259b43427444eeb57096a3133d5e96 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 814.744152] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6c259b43427444eeb57096a3133d5e96 [ 814.745520] env[61649]: DEBUG nova.scheduler.client.report [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 814.749443] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f 
tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Expecting reply to msg 0aacaf04ee4d49cd8815ff92230adafa in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 814.767255] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0aacaf04ee4d49cd8815ff92230adafa [ 814.767255] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.609s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 814.767255] env[61649]: DEBUG nova.compute.manager [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Start building networks asynchronously for instance. {{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 814.767604] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Expecting reply to msg 2ee42377a95e4a27854100bb560dda72 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 814.795270] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2ee42377a95e4a27854100bb560dda72 [ 814.796995] env[61649]: DEBUG nova.compute.utils [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Using /dev/sd instead of None {{(pid=61649) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 814.797744] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Expecting reply to msg 34a51f1ad2014ab5afdc408463a5af4a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 814.799271] env[61649]: DEBUG nova.compute.manager [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Allocating IP information in the background. {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 814.799484] env[61649]: DEBUG nova.network.neutron [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] allocate_for_instance() {{(pid=61649) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 814.808438] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 34a51f1ad2014ab5afdc408463a5af4a [ 814.809229] env[61649]: DEBUG nova.compute.manager [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Start building block device mappings for instance. 
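[annotation] The claim for a0db1e96 above succeeds because the report client sees unchanged inventory for provider dad32f24-3843-462d-a3f9-4ef2a60037c4; usable headroom per resource class is (total - reserved) * allocation_ratio. Plugging in the exact numbers from the inventory data above (plain arithmetic, not Nova code):

    # Effective capacity per resource class, using the inventory dict
    # printed in the report-client entry above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        effective = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, effective)
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0 -- ample room for
    # the m1.nano flavor claimed below (1 vCPU, 128 MB RAM, 1 GB root).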
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 814.812074] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Expecting reply to msg 4849fccadece4a78a9e6a454c4a07883 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 814.849384] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4849fccadece4a78a9e6a454c4a07883 [ 814.853619] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Expecting reply to msg b4a425bb4b1545a5ab502fa0107aadde in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 814.884634] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b4a425bb4b1545a5ab502fa0107aadde [ 814.885851] env[61649]: DEBUG nova.compute.manager [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Start spawning the instance on the hypervisor. {{(pid=61649) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 814.897016] env[61649]: DEBUG nova.policy [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd6031e11c5c14dcbbeec8825bf6a5a88', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1ed1a40fb9324c3e9551f4148660c051', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61649) authorize /opt/stack/nova/nova/policy.py:203}} [ 814.909788] env[61649]: DEBUG nova.virt.hardware [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-04-02T10:03:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-04-02T10:03:42Z,direct_url=,disk_format='vmdk',id=d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d9c1bd4c77004c3cb8e42232cad1896c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-04-02T10:03:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 814.910099] env[61649]: DEBUG nova.virt.hardware [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Flavor limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 814.910324] env[61649]: DEBUG nova.virt.hardware [None 
req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Image limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 814.910548] env[61649]: DEBUG nova.virt.hardware [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Flavor pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 814.910734] env[61649]: DEBUG nova.virt.hardware [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Image pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 814.910921] env[61649]: DEBUG nova.virt.hardware [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 814.911168] env[61649]: DEBUG nova.virt.hardware [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 814.911369] env[61649]: DEBUG nova.virt.hardware [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 814.911577] env[61649]: DEBUG nova.virt.hardware [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Got 1 possible topologies {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 814.911782] env[61649]: DEBUG nova.virt.hardware [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 814.912094] env[61649]: DEBUG nova.virt.hardware [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 814.913605] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb43c09f-2e85-433d-98ab-7c737fe53bf3 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.921804] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d60977f9-5b69-4b34-96ac-527401d0e134 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.803100] env[61649]: DEBUG nova.network.neutron 
[None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Successfully created port: 71fea60f-f613-4e96-a208-6d12d5daf6e0 {{(pid=61649) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 816.523495] env[61649]: DEBUG nova.network.neutron [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Successfully updated port: 71fea60f-f613-4e96-a208-6d12d5daf6e0 {{(pid=61649) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 816.524066] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Expecting reply to msg e6d4dc87351045dc9a55e8d800567eb7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 816.534933] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e6d4dc87351045dc9a55e8d800567eb7 [ 816.535753] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Acquiring lock "refresh_cache-a0db1e96-4ca4-4fed-b86b-d8457f3570a9" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 816.536087] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Acquired lock "refresh_cache-a0db1e96-4ca4-4fed-b86b-d8457f3570a9" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 816.536087] env[61649]: DEBUG nova.network.neutron [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Building network info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 816.536484] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Expecting reply to msg d39ecbbf27744057a1813035148c31a5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 816.543820] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d39ecbbf27744057a1813035148c31a5 [ 816.601966] env[61649]: DEBUG nova.network.neutron [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Instance cache missing network info. 
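[annotation] The nova.virt.hardware trace a few entries up (flavor and image limits 0:0:0, "Build topologies for 1 vcpu(s) 1:1:1", a single possible VirtCPUTopology of 1 socket, 1 core, 1 thread) is an enumeration of (sockets, cores, threads) factorizations of the vCPU count under per-dimension ceilings. A simplified sketch of that enumeration; Nova's real logic lives in nova/virt/hardware.py and also handles preferences and NUMA:

    # Simplified sketch of the topology enumeration logged above:
    # every (sockets, cores, threads) triple whose product equals the
    # vCPU count and respects the per-dimension maxima (the 65536
    # defaults match the "limits were ..." line in the log).
    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        topos = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            for cores in range(1, min(vcpus // sockets, max_cores) + 1):
                if (vcpus // sockets) % cores:
                    continue
                threads = vcpus // (sockets * cores)
                if threads <= max_threads:
                    topos.append((sockets, cores, threads))
        return topos

    print(possible_topologies(1))   # [(1, 1, 1)] -- matches the log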
{{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 816.867699] env[61649]: DEBUG nova.network.neutron [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Updating instance_info_cache with network_info: [{"id": "71fea60f-f613-4e96-a208-6d12d5daf6e0", "address": "fa:16:3e:4e:c0:75", "network": {"id": "b96f19dc-5248-4d43-8f39-ea3131aaf6db", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.33", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d9c1bd4c77004c3cb8e42232cad1896c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a8c8175-1197-4f12-baac-ef6aba95f585", "external-id": "nsx-vlan-transportzone-832", "segmentation_id": 832, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71fea60f-f6", "ovs_interfaceid": "71fea60f-f613-4e96-a208-6d12d5daf6e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 816.868047] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Expecting reply to msg 388260afbeb54eb38890999b493286fa in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 816.880855] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 388260afbeb54eb38890999b493286fa [ 816.881524] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Releasing lock "refresh_cache-a0db1e96-4ca4-4fed-b86b-d8457f3570a9" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 816.882009] env[61649]: DEBUG nova.compute.manager [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Instance network_info: |[{"id": "71fea60f-f613-4e96-a208-6d12d5daf6e0", "address": "fa:16:3e:4e:c0:75", "network": {"id": "b96f19dc-5248-4d43-8f39-ea3131aaf6db", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.33", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d9c1bd4c77004c3cb8e42232cad1896c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a8c8175-1197-4f12-baac-ef6aba95f585", "external-id": "nsx-vlan-transportzone-832", 
"segmentation_id": 832, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71fea60f-f6", "ovs_interfaceid": "71fea60f-f613-4e96-a208-6d12d5daf6e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 816.882414] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4e:c0:75', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1a8c8175-1197-4f12-baac-ef6aba95f585', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '71fea60f-f613-4e96-a208-6d12d5daf6e0', 'vif_model': 'vmxnet3'}] {{(pid=61649) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 816.889777] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Creating folder: Project (1ed1a40fb9324c3e9551f4148660c051). Parent ref: group-v51588. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 816.890410] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-44c927b1-ee6b-42b8-9e52-612cb6b2a4fc {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.908124] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Created folder: Project (1ed1a40fb9324c3e9551f4148660c051) in parent group-v51588. [ 816.908124] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Creating folder: Instances. Parent ref: group-v51629. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 816.908124] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f36d00b7-0c6c-42cc-a4e0-fbedc8c02f66 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.914171] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Created folder: Instances in parent group-v51629. [ 816.914488] env[61649]: DEBUG oslo.service.loopingcall [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 816.914866] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Creating VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 816.915089] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-112b5171-d19f-4d55-ba57-76702934aa83 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.945186] env[61649]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 816.945186] env[61649]: value = "task-158140"
[ 816.945186] env[61649]: _type = "Task"
[ 816.945186] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.954671] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158140, 'name': CreateVM_Task} progress is 6%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.045096] env[61649]: DEBUG nova.compute.manager [req-1bed2b6a-ee3d-4ac1-811a-2dce2c10a4cd req-ccbaef19-a07f-42dd-a107-06fe58a290ec service nova] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Received event network-vif-plugged-71fea60f-f613-4e96-a208-6d12d5daf6e0 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 817.045340] env[61649]: DEBUG oslo_concurrency.lockutils [req-1bed2b6a-ee3d-4ac1-811a-2dce2c10a4cd req-ccbaef19-a07f-42dd-a107-06fe58a290ec service nova] Acquiring lock "a0db1e96-4ca4-4fed-b86b-d8457f3570a9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 817.045572] env[61649]: DEBUG oslo_concurrency.lockutils [req-1bed2b6a-ee3d-4ac1-811a-2dce2c10a4cd req-ccbaef19-a07f-42dd-a107-06fe58a290ec service nova] Lock "a0db1e96-4ca4-4fed-b86b-d8457f3570a9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 817.045716] env[61649]: DEBUG oslo_concurrency.lockutils [req-1bed2b6a-ee3d-4ac1-811a-2dce2c10a4cd req-ccbaef19-a07f-42dd-a107-06fe58a290ec service nova] Lock "a0db1e96-4ca4-4fed-b86b-d8457f3570a9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 817.045910] env[61649]: DEBUG nova.compute.manager [req-1bed2b6a-ee3d-4ac1-811a-2dce2c10a4cd req-ccbaef19-a07f-42dd-a107-06fe58a290ec service nova] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] No waiting events found dispatching network-vif-plugged-71fea60f-f613-4e96-a208-6d12d5daf6e0 {{(pid=61649) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 817.046032] env[61649]: WARNING nova.compute.manager [req-1bed2b6a-ee3d-4ac1-811a-2dce2c10a4cd req-ccbaef19-a07f-42dd-a107-06fe58a290ec service nova] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Received unexpected event network-vif-plugged-71fea60f-f613-4e96-a208-6d12d5daf6e0 for instance with vm_state building and task_state spawning. [ 817.455361] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158140, 'name': CreateVM_Task} progress is 99%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
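[annotation] CreateVM_Task above is tracked purely by polling: the returnval names the task object, then progress ticks from 6% to 99% until the task completes with a duration_secs. A generic loop in the same spirit; get_task_info() is a hypothetical stand-in, not the oslo.vmware API (its real loop is the _poll_task frames in oslo_vmware/api.py):

    import time

    def get_task_info(task_ref):  # hypothetical stub for illustration
        return {'state': 'success', 'result': None}

    # Generic poll-until-done loop mirroring the CreateVM_Task trace above.
    def wait_for_task(task_ref, poll_interval=0.5, timeout=300):
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info(task_ref)
            if info['state'] == 'success':
                return info['result']
            if info['state'] == 'error':
                raise RuntimeError(info.get('error', 'task failed'))
            # still running: the log shows progress ticking 6% -> 99%
            time.sleep(poll_interval)
        raise TimeoutError('task %s did not complete' % task_ref)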
[ 817.956186] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158140, 'name': CreateVM_Task} progress is 99%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.284528] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Acquiring lock "c9fe1bfe-e813-43e9-9668-b813416ee27b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 818.284768] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Lock "c9fe1bfe-e813-43e9-9668-b813416ee27b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 818.456868] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158140, 'name': CreateVM_Task, 'duration_secs': 1.361425} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.457063] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Created VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 818.457911] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 818.458081] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 818.458432] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 818.458693] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01ce45cb-633c-4a8a-a66a-9074b6f45ef1 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.463839] env[61649]: DEBUG oslo_vmware.api [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Waiting for the task: (returnval){
[ 818.463839] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52b6f021-0b8a-ef20-a2c3-1f2afc04ad5e"
[ 818.463839] env[61649]: _type = "Task"
[ 818.463839] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.474890] env[61649]: DEBUG oslo_vmware.api [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52b6f021-0b8a-ef20-a2c3-1f2afc04ad5e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.974924] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 818.975227] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Processing image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 818.975700] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 819.376487] env[61649]: DEBUG nova.compute.manager [req-fb8f7e4f-435e-4743-b168-402b476e9a1a req-1a621c22-9c55-4714-ae7d-b421b14cad52 service nova] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Received event network-changed-71fea60f-f613-4e96-a208-6d12d5daf6e0 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 819.376487] env[61649]: DEBUG nova.compute.manager [req-fb8f7e4f-435e-4743-b168-402b476e9a1a req-1a621c22-9c55-4714-ae7d-b421b14cad52 service nova] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Refreshing instance network info cache due to event network-changed-71fea60f-f613-4e96-a208-6d12d5daf6e0.
{{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 819.376636] env[61649]: DEBUG oslo_concurrency.lockutils [req-fb8f7e4f-435e-4743-b168-402b476e9a1a req-1a621c22-9c55-4714-ae7d-b421b14cad52 service nova] Acquiring lock "refresh_cache-a0db1e96-4ca4-4fed-b86b-d8457f3570a9" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 819.376774] env[61649]: DEBUG oslo_concurrency.lockutils [req-fb8f7e4f-435e-4743-b168-402b476e9a1a req-1a621c22-9c55-4714-ae7d-b421b14cad52 service nova] Acquired lock "refresh_cache-a0db1e96-4ca4-4fed-b86b-d8457f3570a9" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.376929] env[61649]: DEBUG nova.network.neutron [req-fb8f7e4f-435e-4743-b168-402b476e9a1a req-1a621c22-9c55-4714-ae7d-b421b14cad52 service nova] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Refreshing network info cache for port 71fea60f-f613-4e96-a208-6d12d5daf6e0 {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 819.377443] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-fb8f7e4f-435e-4743-b168-402b476e9a1a req-1a621c22-9c55-4714-ae7d-b421b14cad52 service nova] Expecting reply to msg 683f122ce3f8473a8035ec52035ad04f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 819.386047] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 683f122ce3f8473a8035ec52035ad04f [ 819.750886] env[61649]: DEBUG nova.network.neutron [req-fb8f7e4f-435e-4743-b168-402b476e9a1a req-1a621c22-9c55-4714-ae7d-b421b14cad52 service nova] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Updated VIF entry in instance network info cache for port 71fea60f-f613-4e96-a208-6d12d5daf6e0. 
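[annotation] The sequence above is the external-event path: neutron reports network-changed for port 71fea60f, the manager takes the per-instance refresh_cache lock, rebuilds the network info, and updates the VIF entry in the cache. Reduced to its skeleton, with the real lockutils lock and hypothetical fetch/store helpers:

    from oslo_concurrency import lockutils

    def fetch_port_info(port_id):   # hypothetical stub, e.g. a neutron GET
        return {'id': port_id}

    def store_cache(instance_uuid, info):  # hypothetical stub
        pass

    # Sketch of the cache refresh done for network-changed events above;
    # the 'refresh_cache-<uuid>' lock name matches the log.
    def refresh_network_cache(instance_uuid, port_id):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            info = fetch_port_info(port_id)
            store_cache(instance_uuid, info)  # "Updated VIF entry ..."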
{{(pid=61649) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 819.751248] env[61649]: DEBUG nova.network.neutron [req-fb8f7e4f-435e-4743-b168-402b476e9a1a req-1a621c22-9c55-4714-ae7d-b421b14cad52 service nova] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Updating instance_info_cache with network_info: [{"id": "71fea60f-f613-4e96-a208-6d12d5daf6e0", "address": "fa:16:3e:4e:c0:75", "network": {"id": "b96f19dc-5248-4d43-8f39-ea3131aaf6db", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.33", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d9c1bd4c77004c3cb8e42232cad1896c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a8c8175-1197-4f12-baac-ef6aba95f585", "external-id": "nsx-vlan-transportzone-832", "segmentation_id": 832, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71fea60f-f6", "ovs_interfaceid": "71fea60f-f613-4e96-a208-6d12d5daf6e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 819.751761] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-fb8f7e4f-435e-4743-b168-402b476e9a1a req-1a621c22-9c55-4714-ae7d-b421b14cad52 service nova] Expecting reply to msg f69da7e561e44e608a4357b2c2b5d001 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 819.761575] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f69da7e561e44e608a4357b2c2b5d001 [ 819.762202] env[61649]: DEBUG oslo_concurrency.lockutils [req-fb8f7e4f-435e-4743-b168-402b476e9a1a req-1a621c22-9c55-4714-ae7d-b421b14cad52 service nova] Releasing lock "refresh_cache-a0db1e96-4ca4-4fed-b86b-d8457f3570a9" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 819.799305] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-ca90aa73-eb06-4689-8ec0-988713ff447b tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Expecting reply to msg de6f8b15d9dd406a81e5aaa8fe9e0262 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 819.807333] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg de6f8b15d9dd406a81e5aaa8fe9e0262 [ 819.808379] env[61649]: DEBUG oslo_concurrency.lockutils [None req-ca90aa73-eb06-4689-8ec0-988713ff447b tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Acquiring lock "a0db1e96-4ca4-4fed-b86b-d8457f3570a9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 820.210546] env[61649]: DEBUG oslo_concurrency.lockutils [None req-dcfe390e-b1d0-4822-85c5-431ae78e5652 tempest-ServerTagsTestJSON-661148968 tempest-ServerTagsTestJSON-661148968-project-member] Acquiring lock "7e77db18-077d-4665-ad90-c4e5f470716c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) 
[ 820.210854] env[61649]: DEBUG oslo_concurrency.lockutils [None req-dcfe390e-b1d0-4822-85c5-431ae78e5652 tempest-ServerTagsTestJSON-661148968 tempest-ServerTagsTestJSON-661148968-project-member] Lock "7e77db18-077d-4665-ad90-c4e5f470716c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 821.929018] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 821.929342] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Cleaning up deleted instances {{(pid=61649) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11217}}
[ 821.929852] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 2032c15e8f65441d85c11e0bde6d6bb3 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 821.939918] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2032c15e8f65441d85c11e0bde6d6bb3
[ 821.940434] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] There are 0 instances to clean {{(pid=61649) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11226}}
[ 821.940642] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 821.940778] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Cleaning up deleted instances with incomplete migration {{(pid=61649) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11255}}
[ 821.941058] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 9fa1f481649249fabf48e811d45823f6 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 821.957278] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9fa1f481649249fabf48e811d45823f6
[ 821.959588] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 821.959588] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 1b35ebda3e7b4eba9b5b835824fe2bae in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 821.968128] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1b35ebda3e7b4eba9b5b835824fe2bae
[ 823.968237] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
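The "Running periodic task ComputeManager._*" lines are emitted by oslo.service's periodic task machinery before each due task. A sketch of how such a task is declared and driven; the 60-second spacing is illustrative, not Nova's actual configuration:

    from oslo_config import cfg
    from oslo_service import periodic_task

    class Manager(periodic_task.PeriodicTasks):
        def __init__(self):
            super().__init__(cfg.CONF)

        @periodic_task.periodic_task(spacing=60)  # illustrative interval
        def _run_pending_deletes(self, context):
            pass  # e.g. the "Cleaning up deleted instances" pass logged above

    # run_periodic_tasks() logs "Running periodic task <class>.<method>"
    # before invoking each task that is due.
    Manager().run_periodic_tasks(context=None)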
[ 824.427049] env[61649]: DEBUG oslo_concurrency.lockutils [None req-f1e0b66c-b8ee-45b4-846e-9c16f13450a7 tempest-VolumesAdminNegativeTest-523549464 tempest-VolumesAdminNegativeTest-523549464-project-member] Acquiring lock "6efdc98b-e32d-4313-b13f-95c3d4911823" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 824.427049] env[61649]: DEBUG oslo_concurrency.lockutils [None req-f1e0b66c-b8ee-45b4-846e-9c16f13450a7 tempest-VolumesAdminNegativeTest-523549464 tempest-VolumesAdminNegativeTest-523549464-project-member] Lock "6efdc98b-e32d-4313-b13f-95c3d4911823" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 824.929296] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 824.929480] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Starting heal instance info cache {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}}
[ 824.929603] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Rebuilding the list of instances to heal {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}}
[ 824.930165] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg f8b444c9af0c43bcb74d7ad3199ec1dc in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 824.952786] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f8b444c9af0c43bcb74d7ad3199ec1dc
[ 824.955059] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}}
[ 824.955200] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}}
[ 824.955327] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}}
[ 824.955447] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 00931111-13a1-447d-a401-943221badd59] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}}
[ 824.955567] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}}
[ 824.955752] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}}
[ 824.955825] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}}
[ 824.955905] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}}
[ 824.956045] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}}
[ 824.956183] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}}
[ 824.956300] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Didn't find any instances for network info cache update. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}}
[ 824.956900] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 824.956997] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 824.957189] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 824.957317] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61649) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}}
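The _heal_instance_info_cache pass above skipped every instance because each was still in the Building state, then reported nothing left to heal. A loose paraphrase of that selection step (not Nova's actual code; the field name is illustrative):

    def instances_to_heal(instances):
        # Builds are skipped, matching "Skipping network cache update for
        # instance because it is Building."; an empty result is logged as
        # "Didn't find any instances for network info cache update."
        return [inst for inst in instances if inst["vm_state"] != "building"]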
[ 824.957455] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 824.957859] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 8f710928729e45769ec062a0fab0354d in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 824.965999] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8f710928729e45769ec062a0fab0354d
[ 824.966886] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 824.967101] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 824.967298] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 824.967446] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61649) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 824.968514] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2485d285-a5e7-4039-942c-1fece72f6990 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 824.977223] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afdaaff3-de71-4bde-a7c3-45302324cc90 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 824.992442] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f641abba-dc9e-41df-89c3-90610d7729d5 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 824.998604] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5bfd3d9-59a7-4b85-93e5-4e50a47477f9 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 825.027998] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181808MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61649) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 825.028171] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 825.028370] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 825.029251] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 7a010f73e30b4498b42b70330fdf3f15 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 825.063330] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7a010f73e30b4498b42b70330fdf3f15
[ 825.068097] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg ff3df44d075b473caf12983d2d304a29 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 825.078085] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ff3df44d075b473caf12983d2d304a29
[ 825.097320] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance aacbebf5-bd31-465b-b574-6c4a98b27f30 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 825.097542] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance eb0c04e3-1234-445c-bfa6-e031dd0b89d3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 825.097718] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance fd0ac9db-adc2-46f2-93ff-0b7e299534a7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 825.097876] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 00931111-13a1-447d-a401-943221badd59 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 825.098032] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 99f9912a-edf0-40f5-a7ce-55767081705b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 825.098187] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 7f9f2074-6822-4d9d-9791-4bebc7e55862 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 825.098338] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance bf8c692f-6510-4548-aedd-0e1792512e20 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 825.098489] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance e5fe92cf-e150-419f-a164-a98a9d24dd8c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 825.098640] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 95426048-d403-4dad-9ad7-b76de655a319 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 825.098787] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance a0db1e96-4ca4-4fed-b86b-d8457f3570a9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 825.099412] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 48dc503d9fd343a59bdf184ad468e0f7 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 825.112479] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 48dc503d9fd343a59bdf184ad468e0f7
[ 825.113218] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 93bf61a5-0737-4495-854d-14f1feebab86 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 825.113715] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 66bc7469665242199dd9397262466fb7 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 825.122927] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 66bc7469665242199dd9397262466fb7
[ 825.123593] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance f814cbd2-8d20-4a26-9bae-000a70a3e082 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 825.124075] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg d523c3d0e51340fe845fb6d4e7956f04 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 825.132881] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d523c3d0e51340fe845fb6d4e7956f04
[ 825.134108] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 0fc6dad2-0cde-46db-b840-3bd2737a91af has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 825.134108] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 97c96d48d40f49f18d6704c2e5358b31 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 825.143465] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 97c96d48d40f49f18d6704c2e5358b31
[ 825.144090] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance f09ee30f-7a9b-4c2e-a0b6-da2a711b9b4e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 825.144523] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 9a2c532802c043b78083b94d2a3285ef in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 825.153071] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9a2c532802c043b78083b94d2a3285ef
[ 825.153689] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 28428441-219c-4627-857e-ab8b91390c68 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 825.154092] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg c54817e13d8947e9a04f4f67a69ab14b in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 825.164088] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c54817e13d8947e9a04f4f67a69ab14b
[ 825.164715] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 5e9ab69f-856e-4b8d-808a-0799b87a9cc6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 825.165450] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 56b26676d3b84b958982fe561c5021c5 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 825.174522] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 56b26676d3b84b958982fe561c5021c5
[ 825.175141] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance b176b001-3c32-439a-b6cd-9b608a0ac623 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 825.175777] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 62536727bc1b4bdc80b2c365628caa26 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 825.184399] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 62536727bc1b4bdc80b2c365628caa26
[ 825.185034] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 95dad1e2-74d2-478f-8095-23a26770e27f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 825.185526] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg e55a22696d9f4221b63e7664df1085ad in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 825.195523] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e55a22696d9f4221b63e7664df1085ad
[ 825.196206] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 29f84900-0805-4ab2-af4d-bd7be2ac94d3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 825.196652] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 240d92bb2aac439eadcb364eddf133ca in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 825.205584] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 240d92bb2aac439eadcb364eddf133ca
[ 825.206209] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance bf2399eb-b2df-43b3-bddd-48692825c40a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 825.206645] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg fd5be5143925498bb6d86510389d109e in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 825.216032] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fd5be5143925498bb6d86510389d109e
[ 825.216160] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 6ab197e9-3e38-4b37-b625-c30b6977261a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 825.216615] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg d67c2c71e0a7400098e03cfaf21ed0a4 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 825.228105] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d67c2c71e0a7400098e03cfaf21ed0a4
[ 825.228804] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance d7a1c8c3-1694-4704-8414-098af751c05e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 825.229284] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 8b932cd462c04dcaa5bde19ec3b527d6 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 825.242398] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8b932cd462c04dcaa5bde19ec3b527d6
[ 825.243124] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance e4d8cb96-182d-4b77-a8ac-dfd1bf52d484 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 825.243605] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 9c1afeadf3d945a782ce2b79898ec0ce in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 825.255196] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9c1afeadf3d945a782ce2b79898ec0ce
[ 825.255893] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 10da1abe-1c95-44b8-a10d-ce618625b69b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 825.256433] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 41e389630b6c47fd91e58ba72e2f9e3f in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 825.265557] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 41e389630b6c47fd91e58ba72e2f9e3f
[ 825.266321] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance a086f03c-c993-4e1a-8a3e-efa40bb8b8bd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 825.266744] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 0df04d272d0c447bba2789009a9d53d9 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 825.276929] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0df04d272d0c447bba2789009a9d53d9
[ 825.277655] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance c9fe1bfe-e813-43e9-9668-b813416ee27b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 825.278124] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 4c00bbfb06db42c3aa7c9f71d27ef234 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 825.287002] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4c00bbfb06db42c3aa7c9f71d27ef234
[ 825.287717] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 7e77db18-077d-4665-ad90-c4e5f470716c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 825.288240] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 2d1955164c564905afd3ff5f5206d27c in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 825.299484] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2d1955164c564905afd3ff5f5206d27c
[ 825.299484] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 6efdc98b-e32d-4313-b13f-95c3d4911823 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 825.299649] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 825.299787] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
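The "Final resource view" figures are consistent with the per-instance allocations listed above plus the 512 MB RAM reservation from the inventory data that follows; a quick cross-check, assuming all ten tracked instances hold the same allocation:

    # Each tracked instance holds {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1};
    # 512 MB of RAM is reserved per the MEMORY_MB inventory below.
    instances = 10
    used_ram_mb = 512 + instances * 128   # 1792 -> "used_ram=1792MB"
    used_disk_gb = instances * 1          # 10   -> "used_disk=10GB"
    used_vcpus = instances * 1            # 10   -> "used_vcpus=10"
    assert (used_ram_mb, used_disk_gb, used_vcpus) == (1792, 10, 10)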
[ 825.634226] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d666fcda-ed1c-4a20-a30b-afa8e2049bb9 tempest-ServerActionsTestOtherB-464316570 tempest-ServerActionsTestOtherB-464316570-project-member] Acquiring lock "f51cfd74-25e5-4077-9b43-8cb38fe051f8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 825.634533] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d666fcda-ed1c-4a20-a30b-afa8e2049bb9 tempest-ServerActionsTestOtherB-464316570 tempest-ServerActionsTestOtherB-464316570-project-member] Lock "f51cfd74-25e5-4077-9b43-8cb38fe051f8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 825.708363] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5025eba1-fdf2-4506-852c-4c7d9b130917 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 825.717907] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a85b3d9e-b7b1-45f3-b1fe-17dc294dc782 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 825.748425] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86bb7dd3-c32b-4276-94e8-e6efd370a11a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 825.755619] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-189dad43-2af9-4b93-a463-8f894d9315cf {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 825.769418] env[61649]: DEBUG nova.compute.provider_tree [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 825.769990] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 3c0ae1667570448391b63636c76b0c59 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 825.777235] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3c0ae1667570448391b63636c76b0c59
[ 825.778122] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
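Under standard placement semantics, the inventory above implies a schedulable capacity of (total - reserved) * allocation_ratio per resource class, with max_unit bounding any single allocation; worked out for this node:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, int(capacity))  # VCPU 192, MEMORY_MB 196078, DISK_GB 400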
[ 825.780450] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg cae93c0fcca14ce7836f94537a46f9e2 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 825.792025] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cae93c0fcca14ce7836f94537a46f9e2
[ 825.792476] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61649) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 825.792656] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.764s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 826.764937] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 826.924218] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 826.928830] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 832.769720] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7c3715d9-3afd-4e7e-8ab8-c8af166fd2f4 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Acquiring lock "db7db1c9-6716-4591-b669-b85dd595a3e9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 832.770034] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7c3715d9-3afd-4e7e-8ab8-c8af166fd2f4 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Lock "db7db1c9-6716-4591-b669-b85dd595a3e9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 839.833940] env[61649]: DEBUG oslo_concurrency.lockutils [None req-29301568-9498-49da-a649-6f06e5d919ae tempest-ImagesOneServerNegativeTestJSON-2134550288 tempest-ImagesOneServerNegativeTestJSON-2134550288-project-member] Acquiring lock "899db5f5-4963-4f7a-97d2-9c2dfd7a6981" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 839.834254] env[61649]: DEBUG oslo_concurrency.lockutils [None req-29301568-9498-49da-a649-6f06e5d919ae tempest-ImagesOneServerNegativeTestJSON-2134550288 tempest-ImagesOneServerNegativeTestJSON-2134550288-project-member] Lock "899db5f5-4963-4f7a-97d2-9c2dfd7a6981" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 843.388760] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7325177b-3f12-4789-98e6-dfb31afaf4a2 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Acquiring lock "7de3e5cd-94ee-4a80-8baf-17ccfed9d8c8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 843.389028] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7325177b-3f12-4789-98e6-dfb31afaf4a2 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Lock "7de3e5cd-94ee-4a80-8baf-17ccfed9d8c8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 860.869605] env[61649]: WARNING oslo_vmware.rw_handles [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 860.869605] env[61649]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 860.869605] env[61649]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 860.869605] env[61649]: ERROR oslo_vmware.rw_handles self._conn.getresponse()
[ 860.869605] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 860.869605] env[61649]: ERROR oslo_vmware.rw_handles response.begin()
[ 860.869605] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 860.869605] env[61649]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status()
[ 860.869605] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 860.869605] env[61649]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without"
[ 860.869605] env[61649]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 860.869605] env[61649]: ERROR oslo_vmware.rw_handles
[ 860.869605] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Downloaded image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to vmware_temp/34076d73-8a09-4613-ad5a-ce2f9f689e17/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 860.871094] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Caching image {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 860.871370] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Copying Virtual Disk [datastore1] vmware_temp/34076d73-8a09-4613-ad5a-ce2f9f689e17/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk to [datastore1] vmware_temp/34076d73-8a09-4613-ad5a-ce2f9f689e17/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk {{(pid=61649) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 860.871661] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e789a699-0c26-404b-b07f-771547b42bcc {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 860.879876] env[61649]: DEBUG oslo_vmware.api [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Waiting for the task: (returnval){
[ 860.879876] env[61649]: value = "task-158141"
[ 860.879876] env[61649]: _type = "Task"
[ 860.879876] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 860.887607] env[61649]: DEBUG oslo_vmware.api [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Task: {'id': task-158141, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
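The "Waiting for the task ... to complete" / "progress is 0%" pairs are oslo.vmware's task-polling loop. A sketch of the calling pattern; the vCenter host and credentials are placeholders, and start_disk_copy_task() is a hypothetical helper standing in for the CopyVirtualDisk_Task invocation:

    from oslo_vmware import api

    session = api.VMwareAPISession(
        "vc.example.test", "user", "secret",
        api_retry_count=10, task_poll_interval=0.5)

    task = start_disk_copy_task(session)  # hypothetical helper
    # wait_for_task() polls the task (the "_poll_task ... progress is N%"
    # lines above) and raises a translated exception if the task fails.
    session.wait_for_task(task)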
[ 861.390299] env[61649]: DEBUG oslo_vmware.exceptions [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Fault InvalidArgument not matched. {{(pid=61649) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 861.390838] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 861.391122] env[61649]: ERROR nova.compute.manager [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 861.391122] env[61649]: Faults: ['InvalidArgument']
[ 861.391122] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Traceback (most recent call last):
[ 861.391122] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources
[ 861.391122] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] yield resources
[ 861.391122] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 861.391122] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] self.driver.spawn(context, instance, image_meta,
[ 861.391122] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 861.391122] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 861.391122] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 861.391122] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] self._fetch_image_if_missing(context, vi)
[ 861.391122] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 861.391792] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] image_cache(vi, tmp_image_ds_loc)
[ 861.391792] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 861.391792] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] vm_util.copy_virtual_disk(
[ 861.391792] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 861.391792] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] session._wait_for_task(vmdk_copy_task)
[ 861.391792] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 861.391792] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] return self.wait_for_task(task_ref)
[ 861.391792] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 861.391792] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] return evt.wait()
[ 861.391792] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 861.391792] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] result = hub.switch()
[ 861.391792] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 861.391792] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] return self.greenlet.switch()
[ 861.392438] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 861.392438] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] self.f(*self.args, **self.kw)
[ 861.392438] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 861.392438] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] raise exceptions.translate_fault(task_info.error)
[ 861.392438] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 861.392438] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Faults: ['InvalidArgument']
[ 861.392438] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30]
[ 861.392438] env[61649]: INFO nova.compute.manager [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Terminating instance
[ 861.393042] env[61649]: DEBUG oslo_concurrency.lockutils [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 861.393230] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 861.393466] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-43a97a0f-788c-41ff-bdfe-4103c5e959d5 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 861.396782] env[61649]: DEBUG nova.compute.manager [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 861.396891] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 861.397712] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c9955d4-01be-4d38-a9b4-d6d289e3da4c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 861.404774] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Unregistering the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 861.404992] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c2e28b92-a0a0-45cd-b6d5-f26943d08337 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 861.407346] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 861.407522] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61649) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 861.408512] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa06de3e-4480-4d74-a937-440a15a71544 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 861.413120] env[61649]: DEBUG oslo_vmware.api [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Waiting for the task: (returnval){
[ 861.413120] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52316153-ba9c-e3b1-91ed-0d6fc3aae109"
[ 861.413120] env[61649]: _type = "Task"
[ 861.413120] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 861.420520] env[61649]: DEBUG oslo_vmware.api [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52316153-ba9c-e3b1-91ed-0d6fc3aae109, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 861.481381] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Unregistered the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 861.481597] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Deleting contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 861.481769] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Deleting the datastore file [datastore1] aacbebf5-bd31-465b-b574-6c4a98b27f30 {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 861.482041] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2f036b41-0e97-46b7-bcdd-f9c27e3d1820 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 861.487868] env[61649]: DEBUG oslo_vmware.api [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Waiting for the task: (returnval){
[ 861.487868] env[61649]: value = "task-158143"
[ 861.487868] env[61649]: _type = "Task"
[ 861.487868] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 861.496234] env[61649]: DEBUG oslo_vmware.api [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Task: {'id': task-158143, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 861.923257] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Preparing fetch location {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 861.923550] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Creating directory with path [datastore1] vmware_temp/6ddbeb2f-635d-407a-99d9-6b9928176b75/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 861.923757] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d37b4439-d9ff-4616-bb9f-4b625be557a7 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 861.934574] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Created directory with path [datastore1] vmware_temp/6ddbeb2f-635d-407a-99d9-6b9928176b75/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 861.934773] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Fetch image to [datastore1] vmware_temp/6ddbeb2f-635d-407a-99d9-6b9928176b75/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 861.934943] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to [datastore1] vmware_temp/6ddbeb2f-635d-407a-99d9-6b9928176b75/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 861.935666] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49526b51-074c-4c28-a732-8ecadfbf59b5 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 861.943891] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71775c2f-cd45-429c-92bf-819a6eee6271 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 861.954490] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccd3b812-f08d-47b1-905d-dc8f654adcf9 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 861.985904] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with
opID=oslo.vmware-cccd57a1-1f1c-4c35-bab4-64be6445da80 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.997214] env[61649]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-7ba726c2-a9a3-4363-bb2c-080bbe8f1fb1 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.998897] env[61649]: DEBUG oslo_vmware.api [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Task: {'id': task-158143, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.064197} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.999154] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Deleted the datastore file {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 861.999334] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Deleted contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 861.999509] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 861.999681] env[61649]: INFO nova.compute.manager [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Took 0.60 seconds to destroy the instance on the hypervisor. 
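The wait_for_task/_poll_task records above all follow one pattern: oslo.vmware submits a vCenter task (CopyVirtualDisk_Task, SearchDatastore_Task, DeleteDatastoreFile_Task), then polls the task's TaskInfo until it reaches a terminal state; an error state is translated into the VimFaultException seen in the traceback ("A specified parameter was not correct: fileType", Faults: ['InvalidArgument']). A minimal sketch of that polling loop, with illustrative names rather than the exact oslo.vmware internals:

    import time

    class VimFaultException(Exception):
        # stand-in for oslo_vmware.exceptions.VimFaultException
        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list

    def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
        # get_task_info is an assumed callable that reads TaskInfo via the
        # PropertyCollector, which is what the RetrievePropertiesEx
        # invocations above are doing under the hood.
        while True:
            info = get_task_info(task_ref)
            if info.state == 'success':
                return info  # e.g. DeleteDatastoreFile_Task, duration_secs: 0.064197
            if info.state == 'error':
                # surfaces as "A specified parameter was not correct: fileType"
                raise VimFaultException([info.error.fault_name],
                                        info.error.localized_message)
            time.sleep(poll_interval)  # source of the "progress is 0%" lines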
[ 862.002033] env[61649]: DEBUG nova.compute.claims [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Aborting claim: {{(pid=61649) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 862.002217] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 862.002430] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 862.004353] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Expecting reply to msg 82254b9cc6fb41d9a799ed32f9f734d7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 862.020856] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 862.035297] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 82254b9cc6fb41d9a799ed32f9f734d7 [ 862.120333] env[61649]: DEBUG nova.scheduler.client.report [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Refreshing inventories for resource provider dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 862.135336] env[61649]: DEBUG nova.scheduler.client.report [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Updating ProviderTree inventory for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 862.135564] env[61649]: DEBUG nova.compute.provider_tree [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Updating 
inventory in ProviderTree for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 862.148031] env[61649]: DEBUG nova.scheduler.client.report [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Refreshing aggregate associations for resource provider dad32f24-3843-462d-a3f9-4ef2a60037c4, aggregates: None {{(pid=61649) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 862.165473] env[61649]: DEBUG nova.scheduler.client.report [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Refreshing trait associations for resource provider dad32f24-3843-462d-a3f9-4ef2a60037c4, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=61649) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 862.189524] env[61649]: DEBUG oslo_vmware.rw_handles [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6ddbeb2f-635d-407a-99d9-6b9928176b75/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 862.252705] env[61649]: DEBUG oslo_vmware.rw_handles [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Completed reading data from the image iterator. {{(pid=61649) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 862.252918] env[61649]: DEBUG oslo_vmware.rw_handles [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6ddbeb2f-635d-407a-99d9-6b9928176b75/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61649) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 862.504579] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8bd079a-5916-4d28-9748-b98e446d9e62 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.512702] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5214d677-7ac3-4d77-9e98-3a6d9a2a8589 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.542510] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d347d077-01af-4325-a1d0-797af046c27e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.549513] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4078b5d5-c219-48e6-946d-bb5b3966eab0 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.564731] env[61649]: DEBUG nova.compute.provider_tree [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 862.565235] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Expecting reply to msg d0c2ce26a12b4eca8a3354984a3c2d06 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 862.572700] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d0c2ce26a12b4eca8a3354984a3c2d06 [ 862.574531] env[61649]: DEBUG nova.scheduler.client.report [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 862.578245] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Expecting reply to msg 56767c7f647740b0adb72e4a0c285fd0 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 862.588436] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 56767c7f647740b0adb72e4a0c285fd0 [ 862.589138] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.587s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 862.589665] env[61649]: ERROR nova.compute.manager [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 862.589665] env[61649]: Faults: ['InvalidArgument'] [ 862.589665] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Traceback (most recent call last): [ 862.589665] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 862.589665] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] self.driver.spawn(context, instance, image_meta, [ 862.589665] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 862.589665] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] self._vmops.spawn(context, instance, image_meta, injected_files, [ 862.589665] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 862.589665] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] self._fetch_image_if_missing(context, vi) [ 862.589665] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 862.589665] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] image_cache(vi, tmp_image_ds_loc) [ 862.589665] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 862.590125] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] vm_util.copy_virtual_disk( [ 862.590125] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 862.590125] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] session._wait_for_task(vmdk_copy_task) [ 862.590125] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 862.590125] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] return self.wait_for_task(task_ref) [ 862.590125] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 862.590125] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] return evt.wait() [ 862.590125] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 862.590125] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] result = hub.switch() [ 862.590125] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 862.590125] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] return self.greenlet.switch() [ 862.590125] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 862.590125] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] self.f(*self.args, **self.kw) [ 862.590550] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 862.590550] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] raise exceptions.translate_fault(task_info.error) [ 862.590550] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 862.590550] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Faults: ['InvalidArgument'] [ 862.590550] env[61649]: ERROR nova.compute.manager [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] [ 862.590550] env[61649]: DEBUG nova.compute.utils [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] VimFaultException {{(pid=61649) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 862.591744] env[61649]: DEBUG nova.compute.manager [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Build of instance aacbebf5-bd31-465b-b574-6c4a98b27f30 was re-scheduled: A specified parameter was not correct: fileType [ 862.591744] env[61649]: Faults: ['InvalidArgument'] {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 862.592126] env[61649]: DEBUG nova.compute.manager [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Unplugging VIFs for instance {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 862.592294] env[61649]: DEBUG nova.compute.manager [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 862.592462] env[61649]: DEBUG nova.compute.manager [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 862.592617] env[61649]: DEBUG nova.network.neutron [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 862.919741] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Expecting reply to msg c45f5ff5a8da41c58d5903857672259a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 862.927640] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c45f5ff5a8da41c58d5903857672259a [ 862.928305] env[61649]: DEBUG nova.network.neutron [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 862.929235] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Expecting reply to msg 78d570131c5c40fdaa17185068a3e436 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 862.938568] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 78d570131c5c40fdaa17185068a3e436 [ 862.939192] env[61649]: INFO nova.compute.manager [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Took 0.35 seconds to deallocate network for instance. 
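When _build_and_run_instance fails with this fault, the cleanup ordering recorded above is fixed: the resource claim is aborted (returning the VCPU/MEMORY_MB/DISK_GB held against provider dad32f24-3843-462d-a3f9-4ef2a60037c4), the driver is asked to unplug VIFs (the vmwareapi driver does not implement this, hence the note above), and the network is deallocated before the build is re-scheduled. A hedged sketch with simplified signatures; the real flow lives in nova.compute.manager:

    def cleanup_failed_build(claim, driver, network_api, context, instance):
        # 1. give the claimed resources back to the resource tracker
        claim.abort()
        # 2. best-effort VIF unplug; vmwareapi raises NotImplementedError,
        #    which is why the log cannot tell whether VIFs were unplugged
        try:
            driver.unplug_vifs(instance, network_info=None)
        except NotImplementedError:
            pass
        # 3. delete the instance's ports; afterwards the instance_info_cache
        #    is updated with an empty network_info ([] in the log)
        network_api.deallocate_for_instance(context, instance)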
[ 862.940876] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Expecting reply to msg a3d9c2ca86a84bb7b13bcc114e6eb52e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 862.972219] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a3d9c2ca86a84bb7b13bcc114e6eb52e [ 862.974804] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Expecting reply to msg c81b7214a39d404a8a896d1b14751bd5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 863.008027] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c81b7214a39d404a8a896d1b14751bd5 [ 863.025876] env[61649]: INFO nova.scheduler.client.report [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Deleted allocations for instance aacbebf5-bd31-465b-b574-6c4a98b27f30 [ 863.034214] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Expecting reply to msg ba03cef336734845910b2c0957f19e41 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 863.048483] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ba03cef336734845910b2c0957f19e41 [ 863.049175] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fd1251b0-ff97-457c-aa6a-7a03932647d4 tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Lock "aacbebf5-bd31-465b-b574-6c4a98b27f30" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 291.538s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 863.049785] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-744afbc0-0f61-4b66-992d-8f196256caea tempest-ServersTestManualDisk-69134108 tempest-ServersTestManualDisk-69134108-project-member] Expecting reply to msg f6bf3d3013644a89854ce925d7eb6a01 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 863.050845] env[61649]: DEBUG oslo_concurrency.lockutils [None req-a1565ac7-a1fc-4f0e-b546-a57bc1e33efb tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Lock "aacbebf5-bd31-465b-b574-6c4a98b27f30" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 92.573s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 863.050845] env[61649]: DEBUG oslo_concurrency.lockutils [None req-a1565ac7-a1fc-4f0e-b546-a57bc1e33efb tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Acquiring lock "aacbebf5-bd31-465b-b574-6c4a98b27f30-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 863.050969] env[61649]: DEBUG oslo_concurrency.lockutils [None
req-a1565ac7-a1fc-4f0e-b546-a57bc1e33efb tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Lock "aacbebf5-bd31-465b-b574-6c4a98b27f30-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 863.051141] env[61649]: DEBUG oslo_concurrency.lockutils [None req-a1565ac7-a1fc-4f0e-b546-a57bc1e33efb tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Lock "aacbebf5-bd31-465b-b574-6c4a98b27f30-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 863.053146] env[61649]: INFO nova.compute.manager [None req-a1565ac7-a1fc-4f0e-b546-a57bc1e33efb tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Terminating instance [ 863.059656] env[61649]: DEBUG nova.compute.manager [None req-a1565ac7-a1fc-4f0e-b546-a57bc1e33efb tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 863.059656] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-a1565ac7-a1fc-4f0e-b546-a57bc1e33efb tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 863.059656] env[61649]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cad9a8af-bd91-47ca-83d6-6b211bc95ec8 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.060349] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f6bf3d3013644a89854ce925d7eb6a01 [ 863.060894] env[61649]: DEBUG nova.compute.manager [None req-744afbc0-0f61-4b66-992d-8f196256caea tempest-ServersTestManualDisk-69134108 tempest-ServersTestManualDisk-69134108-project-member] [instance: 9142a98b-6400-4cd2-b21f-29a435f95503] Starting instance...
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 863.063444] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-744afbc0-0f61-4b66-992d-8f196256caea tempest-ServersTestManualDisk-69134108 tempest-ServersTestManualDisk-69134108-project-member] Expecting reply to msg 1e9da40b3eb548908eeca9c059b380f0 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 863.069044] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20b18ad0-c223-4bba-b36a-48a2d8c5c831 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.099866] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1e9da40b3eb548908eeca9c059b380f0 [ 863.100451] env[61649]: WARNING nova.virt.vmwareapi.vmops [None req-a1565ac7-a1fc-4f0e-b546-a57bc1e33efb tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance aacbebf5-bd31-465b-b574-6c4a98b27f30 could not be found. [ 863.100640] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-a1565ac7-a1fc-4f0e-b546-a57bc1e33efb tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 863.100815] env[61649]: INFO nova.compute.manager [None req-a1565ac7-a1fc-4f0e-b546-a57bc1e33efb tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Took 0.04 seconds to destroy the instance on the hypervisor. [ 863.101059] env[61649]: DEBUG oslo.service.loopingcall [None req-a1565ac7-a1fc-4f0e-b546-a57bc1e33efb tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 863.101452] env[61649]: DEBUG nova.compute.manager [None req-744afbc0-0f61-4b66-992d-8f196256caea tempest-ServersTestManualDisk-69134108 tempest-ServersTestManualDisk-69134108-project-member] [instance: 9142a98b-6400-4cd2-b21f-29a435f95503] Instance disappeared before build.
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 863.101784] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-744afbc0-0f61-4b66-992d-8f196256caea tempest-ServersTestManualDisk-69134108 tempest-ServersTestManualDisk-69134108-project-member] Expecting reply to msg 9cd1390717854eb0bfd97177965c8e78 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 863.102495] env[61649]: DEBUG nova.compute.manager [-] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 863.102601] env[61649]: DEBUG nova.network.neutron [-] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 863.109773] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9cd1390717854eb0bfd97177965c8e78 [ 863.120833] env[61649]: DEBUG oslo_concurrency.lockutils [None req-744afbc0-0f61-4b66-992d-8f196256caea tempest-ServersTestManualDisk-69134108 tempest-ServersTestManualDisk-69134108-project-member] Lock "9142a98b-6400-4cd2-b21f-29a435f95503" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 240.262s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 863.121365] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-0ca551bb-e9bd-40e5-8df6-fd6ff012a59e tempest-VolumesAdminNegativeTest-523549464 tempest-VolumesAdminNegativeTest-523549464-project-member] Expecting reply to msg ab31288a8b4641dea3cd993702affef9 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 863.130604] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ab31288a8b4641dea3cd993702affef9 [ 863.131030] env[61649]: DEBUG nova.compute.manager [None req-0ca551bb-e9bd-40e5-8df6-fd6ff012a59e tempest-VolumesAdminNegativeTest-523549464 tempest-VolumesAdminNegativeTest-523549464-project-member] [instance: 8999b9ee-ae7e-4438-80b7-dffdb3e92630] Starting instance...
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 863.132835] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-0ca551bb-e9bd-40e5-8df6-fd6ff012a59e tempest-VolumesAdminNegativeTest-523549464 tempest-VolumesAdminNegativeTest-523549464-project-member] Expecting reply to msg df7c67b598ca4bf1bf5ba6d23db59a1f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 863.138149] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 1c9f07f8b99445ea8d5fe134b30fd6fe in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 863.145309] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1c9f07f8b99445ea8d5fe134b30fd6fe [ 863.145628] env[61649]: DEBUG nova.network.neutron [-] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 863.145982] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg abab6748de0d42e79731e85cc4820513 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 863.155677] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg df7c67b598ca4bf1bf5ba6d23db59a1f [ 863.156171] env[61649]: DEBUG nova.compute.manager [None req-0ca551bb-e9bd-40e5-8df6-fd6ff012a59e tempest-VolumesAdminNegativeTest-523549464 tempest-VolumesAdminNegativeTest-523549464-project-member] [instance: 8999b9ee-ae7e-4438-80b7-dffdb3e92630] Instance disappeared before build. {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 863.156478] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-0ca551bb-e9bd-40e5-8df6-fd6ff012a59e tempest-VolumesAdminNegativeTest-523549464 tempest-VolumesAdminNegativeTest-523549464-project-member] Expecting reply to msg de214ac2a11342d3a46c4c2851934c5e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 863.158398] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg abab6748de0d42e79731e85cc4820513 [ 863.158812] env[61649]: INFO nova.compute.manager [-] [instance: aacbebf5-bd31-465b-b574-6c4a98b27f30] Took 0.06 seconds to deallocate network for instance. 
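The lockutils lines record a per-instance serialization scheme: build_and_run_instance and terminate_instance both synchronize on the instance UUID, which is why do_terminate_instance above reports waiting 92.573s while the failed build held the aacbebf5-bd31-465b-b574-6c4a98b27f30 lock for 291.538s, and why a separate "<uuid>-events" lock briefly guards clearing pending external events. A minimal sketch using oslo.concurrency's synchronized decorator directly (Nova wraps it via its own utils); defining the locked functions at call time is what produces qualified names like build_and_run_instance.<locals>._locked_do_build_and_run_instance in the log:

    from oslo_concurrency import lockutils

    def build_and_run_instance(instance_uuid, build_fn):
        @lockutils.synchronized(instance_uuid)
        def _locked_do_build_and_run_instance():
            return build_fn()
        # the wrapper logs 'acquired ... waited' on entry and 'held N.NNNs'
        # on release, exactly the lines seen above
        return _locked_do_build_and_run_instance()

    def terminate_instance(instance_uuid, clear_events_fn, destroy_fn):
        @lockutils.synchronized(instance_uuid)
        def do_terminate_instance():
            # events lock taken and released while the UUID lock is held
            @lockutils.synchronized(instance_uuid + '-events')
            def _clear_events():
                clear_events_fn()
            _clear_events()
            return destroy_fn()
        return do_terminate_instance()  # blocks behind any in-flight build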
[ 863.162221] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-a1565ac7-a1fc-4f0e-b546-a57bc1e33efb tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Expecting reply to msg a59444a8164149d1896d5f783eb9d3e3 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 863.166653] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg de214ac2a11342d3a46c4c2851934c5e [ 863.180715] env[61649]: DEBUG oslo_concurrency.lockutils [None req-0ca551bb-e9bd-40e5-8df6-fd6ff012a59e tempest-VolumesAdminNegativeTest-523549464 tempest-VolumesAdminNegativeTest-523549464-project-member] Lock "8999b9ee-ae7e-4438-80b7-dffdb3e92630" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 238.342s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 863.181323] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-930b1e75-3a85-4d46-94b4-06b7fe92b3a0 tempest-ServersTestJSON-1955261971 tempest-ServersTestJSON-1955261971-project-member] Expecting reply to msg 7097418dfdef45b9b6aed2b8c47aa625 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 863.190750] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a59444a8164149d1896d5f783eb9d3e3 [ 863.194706] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7097418dfdef45b9b6aed2b8c47aa625 [ 863.195746] env[61649]: DEBUG nova.compute.manager [None req-930b1e75-3a85-4d46-94b4-06b7fe92b3a0 tempest-ServersTestJSON-1955261971 tempest-ServersTestJSON-1955261971-project-member] [instance: 018ab9c2-8c6d-4836-9e26-70ffc33b9b30] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 863.197787] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-930b1e75-3a85-4d46-94b4-06b7fe92b3a0 tempest-ServersTestJSON-1955261971 tempest-ServersTestJSON-1955261971-project-member] Expecting reply to msg 69b3483265a94fbb8177af141b66340e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 863.205787] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-a1565ac7-a1fc-4f0e-b546-a57bc1e33efb tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Expecting reply to msg 18d8f6701d3e42dfa25811a507fb7c93 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 863.220959] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 69b3483265a94fbb8177af141b66340e [ 863.221524] env[61649]: DEBUG nova.compute.manager [None req-930b1e75-3a85-4d46-94b4-06b7fe92b3a0 tempest-ServersTestJSON-1955261971 tempest-ServersTestJSON-1955261971-project-member] [instance: 018ab9c2-8c6d-4836-9e26-70ffc33b9b30] Instance disappeared before build.
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 863.221921] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-930b1e75-3a85-4d46-94b4-06b7fe92b3a0 tempest-ServersTestJSON-1955261971 tempest-ServersTestJSON-1955261971-project-member] Expecting reply to msg 869746ff720a4d2ea7c20bf21cc2463c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 863.233876] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 869746ff720a4d2ea7c20bf21cc2463c [ 863.243954] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 18d8f6701d3e42dfa25811a507fb7c93 [ 863.247095] env[61649]: DEBUG oslo_concurrency.lockutils [None req-930b1e75-3a85-4d46-94b4-06b7fe92b3a0 tempest-ServersTestJSON-1955261971 tempest-ServersTestJSON-1955261971-project-member] Lock "018ab9c2-8c6d-4836-9e26-70ffc33b9b30" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 235.070s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 863.247841] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5a8faaf4-6b65-42c3-8b17-eed0a9b1450b tempest-ListServersNegativeTestJSON-2013351456 tempest-ListServersNegativeTestJSON-2013351456-project-member] Expecting reply to msg 40fdaf0e4c1145a48b5e11ba47924913 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 863.248707] env[61649]: DEBUG oslo_concurrency.lockutils [None req-a1565ac7-a1fc-4f0e-b546-a57bc1e33efb tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Lock "aacbebf5-bd31-465b-b574-6c4a98b27f30" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.198s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 863.249033] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-a1565ac7-a1fc-4f0e-b546-a57bc1e33efb tempest-FloatingIPsAssociationTestJSON-1435087651 tempest-FloatingIPsAssociationTestJSON-1435087651-project-member] Expecting reply to msg 99badae48ffd431d9cc5d05be0599e9e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 863.255349] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 40fdaf0e4c1145a48b5e11ba47924913 [ 863.256405] env[61649]: DEBUG nova.compute.manager [None req-5a8faaf4-6b65-42c3-8b17-eed0a9b1450b tempest-ListServersNegativeTestJSON-2013351456 tempest-ListServersNegativeTestJSON-2013351456-project-member] [instance: 93bf61a5-0737-4495-854d-14f1feebab86] Starting instance...
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 863.257404] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5a8faaf4-6b65-42c3-8b17-eed0a9b1450b tempest-ListServersNegativeTestJSON-2013351456 tempest-ListServersNegativeTestJSON-2013351456-project-member] Expecting reply to msg 0f32356a93514b2b85a12efb0f815d42 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 863.261854] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 99badae48ffd431d9cc5d05be0599e9e [ 863.277261] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0f32356a93514b2b85a12efb0f815d42 [ 863.277806] env[61649]: DEBUG nova.compute.manager [None req-5a8faaf4-6b65-42c3-8b17-eed0a9b1450b tempest-ListServersNegativeTestJSON-2013351456 tempest-ListServersNegativeTestJSON-2013351456-project-member] [instance: 93bf61a5-0737-4495-854d-14f1feebab86] Instance disappeared before build. {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 863.278141] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5a8faaf4-6b65-42c3-8b17-eed0a9b1450b tempest-ListServersNegativeTestJSON-2013351456 tempest-ListServersNegativeTestJSON-2013351456-project-member] Expecting reply to msg 280b968fd7534b81b3d7898cac79ac5b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 863.287456] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 280b968fd7534b81b3d7898cac79ac5b [ 863.298310] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5a8faaf4-6b65-42c3-8b17-eed0a9b1450b tempest-ListServersNegativeTestJSON-2013351456 tempest-ListServersNegativeTestJSON-2013351456-project-member] Lock "93bf61a5-0737-4495-854d-14f1feebab86" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 234.263s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 863.298874] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5a8faaf4-6b65-42c3-8b17-eed0a9b1450b tempest-ListServersNegativeTestJSON-2013351456 tempest-ListServersNegativeTestJSON-2013351456-project-member] Expecting reply to msg d631cf145c434e88aa1d4b0432c74f81 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 863.309597] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d631cf145c434e88aa1d4b0432c74f81 [ 863.310100] env[61649]: DEBUG nova.compute.manager [None req-5a8faaf4-6b65-42c3-8b17-eed0a9b1450b tempest-ListServersNegativeTestJSON-2013351456 tempest-ListServersNegativeTestJSON-2013351456-project-member] [instance: f814cbd2-8d20-4a26-9bae-000a70a3e082] Starting instance...
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 863.311806] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5a8faaf4-6b65-42c3-8b17-eed0a9b1450b tempest-ListServersNegativeTestJSON-2013351456 tempest-ListServersNegativeTestJSON-2013351456-project-member] Expecting reply to msg 27ee4ed99d1e4c349f56bbbdcc7cf74b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 863.333126] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 27ee4ed99d1e4c349f56bbbdcc7cf74b [ 863.333687] env[61649]: DEBUG nova.compute.manager [None req-5a8faaf4-6b65-42c3-8b17-eed0a9b1450b tempest-ListServersNegativeTestJSON-2013351456 tempest-ListServersNegativeTestJSON-2013351456-project-member] [instance: f814cbd2-8d20-4a26-9bae-000a70a3e082] Instance disappeared before build. {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 863.334013] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5a8faaf4-6b65-42c3-8b17-eed0a9b1450b tempest-ListServersNegativeTestJSON-2013351456 tempest-ListServersNegativeTestJSON-2013351456-project-member] Expecting reply to msg 87727f307abd4725a1543f02f1575858 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 863.345662] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 87727f307abd4725a1543f02f1575858 [ 863.356711] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5a8faaf4-6b65-42c3-8b17-eed0a9b1450b tempest-ListServersNegativeTestJSON-2013351456 tempest-ListServersNegativeTestJSON-2013351456-project-member] Lock "f814cbd2-8d20-4a26-9bae-000a70a3e082" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 234.290s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 863.357263] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5a8faaf4-6b65-42c3-8b17-eed0a9b1450b tempest-ListServersNegativeTestJSON-2013351456 tempest-ListServersNegativeTestJSON-2013351456-project-member] Expecting reply to msg cab92896eeb543b4868fb1e3175ba285 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 863.367789] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cab92896eeb543b4868fb1e3175ba285 [ 863.368260] env[61649]: DEBUG nova.compute.manager [None req-5a8faaf4-6b65-42c3-8b17-eed0a9b1450b tempest-ListServersNegativeTestJSON-2013351456 tempest-ListServersNegativeTestJSON-2013351456-project-member] [instance: 0fc6dad2-0cde-46db-b840-3bd2737a91af] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 863.369896] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5a8faaf4-6b65-42c3-8b17-eed0a9b1450b tempest-ListServersNegativeTestJSON-2013351456 tempest-ListServersNegativeTestJSON-2013351456-project-member] Expecting reply to msg b0a2a648d5b344098ee7e564756de8f1 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 863.393561] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b0a2a648d5b344098ee7e564756de8f1 [ 863.394143] env[61649]: DEBUG nova.compute.manager [None req-5a8faaf4-6b65-42c3-8b17-eed0a9b1450b tempest-ListServersNegativeTestJSON-2013351456 tempest-ListServersNegativeTestJSON-2013351456-project-member] [instance: 0fc6dad2-0cde-46db-b840-3bd2737a91af] Instance disappeared before build.
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 863.394465] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5a8faaf4-6b65-42c3-8b17-eed0a9b1450b tempest-ListServersNegativeTestJSON-2013351456 tempest-ListServersNegativeTestJSON-2013351456-project-member] Expecting reply to msg 392526b289a5484d8623b5baefa3b4f7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 863.404473] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 392526b289a5484d8623b5baefa3b4f7 [ 863.415491] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5a8faaf4-6b65-42c3-8b17-eed0a9b1450b tempest-ListServersNegativeTestJSON-2013351456 tempest-ListServersNegativeTestJSON-2013351456-project-member] Lock "0fc6dad2-0cde-46db-b840-3bd2737a91af" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 234.320s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 863.416041] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-0c55e1f7-81bf-4c1a-8c95-b69fb287ecb9 tempest-ServersNegativeTestJSON-461174736 tempest-ServersNegativeTestJSON-461174736-project-member] Expecting reply to msg dbb011b430c747a2b3130d4596df948d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 863.425402] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dbb011b430c747a2b3130d4596df948d [ 863.426197] env[61649]: DEBUG nova.compute.manager [None req-0c55e1f7-81bf-4c1a-8c95-b69fb287ecb9 tempest-ServersNegativeTestJSON-461174736 tempest-ServersNegativeTestJSON-461174736-project-member] [instance: f09ee30f-7a9b-4c2e-a0b6-da2a711b9b4e] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 863.427952] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-0c55e1f7-81bf-4c1a-8c95-b69fb287ecb9 tempest-ServersNegativeTestJSON-461174736 tempest-ServersNegativeTestJSON-461174736-project-member] Expecting reply to msg 05170e08e34541dda13c6caa115e71e3 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 863.451448] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 05170e08e34541dda13c6caa115e71e3 [ 863.452046] env[61649]: DEBUG nova.compute.manager [None req-0c55e1f7-81bf-4c1a-8c95-b69fb287ecb9 tempest-ServersNegativeTestJSON-461174736 tempest-ServersNegativeTestJSON-461174736-project-member] [instance: f09ee30f-7a9b-4c2e-a0b6-da2a711b9b4e] Instance disappeared before build.
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 863.452394] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-0c55e1f7-81bf-4c1a-8c95-b69fb287ecb9 tempest-ServersNegativeTestJSON-461174736 tempest-ServersNegativeTestJSON-461174736-project-member] Expecting reply to msg d864ab9e8df14ebcaae93bbd7736b982 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 863.462128] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d864ab9e8df14ebcaae93bbd7736b982 [ 863.473581] env[61649]: DEBUG oslo_concurrency.lockutils [None req-0c55e1f7-81bf-4c1a-8c95-b69fb287ecb9 tempest-ServersNegativeTestJSON-461174736 tempest-ServersNegativeTestJSON-461174736-project-member] Lock "f09ee30f-7a9b-4c2e-a0b6-da2a711b9b4e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 230.736s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 863.474143] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5d1bf3f5-69a3-46b1-a25f-1249767c4d6b tempest-FloatingIPsAssociationNegativeTestJSON-57694453 tempest-FloatingIPsAssociationNegativeTestJSON-57694453-project-member] Expecting reply to msg 659d94992249434f81cc818f3adbeb1a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 863.482623] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 659d94992249434f81cc818f3adbeb1a [ 863.483083] env[61649]: DEBUG nova.compute.manager [None req-5d1bf3f5-69a3-46b1-a25f-1249767c4d6b tempest-FloatingIPsAssociationNegativeTestJSON-57694453 tempest-FloatingIPsAssociationNegativeTestJSON-57694453-project-member] [instance: 28428441-219c-4627-857e-ab8b91390c68] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 863.484860] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5d1bf3f5-69a3-46b1-a25f-1249767c4d6b tempest-FloatingIPsAssociationNegativeTestJSON-57694453 tempest-FloatingIPsAssociationNegativeTestJSON-57694453-project-member] Expecting reply to msg 04810c00c7264ffe930ae5d641f8f6ba in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 863.515887] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 04810c00c7264ffe930ae5d641f8f6ba [ 863.516541] env[61649]: DEBUG nova.compute.manager [None req-5d1bf3f5-69a3-46b1-a25f-1249767c4d6b tempest-FloatingIPsAssociationNegativeTestJSON-57694453 tempest-FloatingIPsAssociationNegativeTestJSON-57694453-project-member] [instance: 28428441-219c-4627-857e-ab8b91390c68] Instance disappeared before build.
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 863.516892] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5d1bf3f5-69a3-46b1-a25f-1249767c4d6b tempest-FloatingIPsAssociationNegativeTestJSON-57694453 tempest-FloatingIPsAssociationNegativeTestJSON-57694453-project-member] Expecting reply to msg db873a7c1a3d46c6a5b37eedf1d7e55e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 863.529555] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg db873a7c1a3d46c6a5b37eedf1d7e55e [ 863.540686] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5d1bf3f5-69a3-46b1-a25f-1249767c4d6b tempest-FloatingIPsAssociationNegativeTestJSON-57694453 tempest-FloatingIPsAssociationNegativeTestJSON-57694453-project-member] Lock "28428441-219c-4627-857e-ab8b91390c68" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 227.191s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 863.541260] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-f489945d-6872-4187-a70f-717312fbcd11 tempest-ServerDiagnosticsNegativeTest-777670579 tempest-ServerDiagnosticsNegativeTest-777670579-project-member] Expecting reply to msg 98775114d0a74d7e9737b550dc5e31a3 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 863.549691] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 98775114d0a74d7e9737b550dc5e31a3 [ 863.550146] env[61649]: DEBUG nova.compute.manager [None req-f489945d-6872-4187-a70f-717312fbcd11 tempest-ServerDiagnosticsNegativeTest-777670579 tempest-ServerDiagnosticsNegativeTest-777670579-project-member] [instance: 5e9ab69f-856e-4b8d-808a-0799b87a9cc6] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 863.551823] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-f489945d-6872-4187-a70f-717312fbcd11 tempest-ServerDiagnosticsNegativeTest-777670579 tempest-ServerDiagnosticsNegativeTest-777670579-project-member] Expecting reply to msg eff39d05b2694066b615cbc30fa89907 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 863.575021] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eff39d05b2694066b615cbc30fa89907 [ 863.575590] env[61649]: DEBUG nova.compute.manager [None req-f489945d-6872-4187-a70f-717312fbcd11 tempest-ServerDiagnosticsNegativeTest-777670579 tempest-ServerDiagnosticsNegativeTest-777670579-project-member] [instance: 5e9ab69f-856e-4b8d-808a-0799b87a9cc6] Instance disappeared before build.
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 863.575931] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-f489945d-6872-4187-a70f-717312fbcd11 tempest-ServerDiagnosticsNegativeTest-777670579 tempest-ServerDiagnosticsNegativeTest-777670579-project-member] Expecting reply to msg b966c346e87b48c784e9b37a17d60bc0 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 863.585882] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b966c346e87b48c784e9b37a17d60bc0 [ 863.596192] env[61649]: DEBUG oslo_concurrency.lockutils [None req-f489945d-6872-4187-a70f-717312fbcd11 tempest-ServerDiagnosticsNegativeTest-777670579 tempest-ServerDiagnosticsNegativeTest-777670579-project-member] Lock "5e9ab69f-856e-4b8d-808a-0799b87a9cc6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 220.793s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 863.596721] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9b1ac097-bfad-47de-bf7d-b61097337ef8 tempest-ServerShowV254Test-1377112975 tempest-ServerShowV254Test-1377112975-project-member] Expecting reply to msg 0c20ebd0f4c94cf386c8128adb6727bd in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 863.604875] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0c20ebd0f4c94cf386c8128adb6727bd [ 863.605298] env[61649]: DEBUG nova.compute.manager [None req-9b1ac097-bfad-47de-bf7d-b61097337ef8 tempest-ServerShowV254Test-1377112975 tempest-ServerShowV254Test-1377112975-project-member] [instance: b176b001-3c32-439a-b6cd-9b608a0ac623] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 863.606904] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9b1ac097-bfad-47de-bf7d-b61097337ef8 tempest-ServerShowV254Test-1377112975 tempest-ServerShowV254Test-1377112975-project-member] Expecting reply to msg f1d17c72c68c49009bceff7b8494ee28 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 863.636345] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f1d17c72c68c49009bceff7b8494ee28 [ 863.637057] env[61649]: DEBUG nova.compute.manager [None req-9b1ac097-bfad-47de-bf7d-b61097337ef8 tempest-ServerShowV254Test-1377112975 tempest-ServerShowV254Test-1377112975-project-member] [instance: b176b001-3c32-439a-b6cd-9b608a0ac623] Instance disappeared before build.
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 863.637449] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9b1ac097-bfad-47de-bf7d-b61097337ef8 tempest-ServerShowV254Test-1377112975 tempest-ServerShowV254Test-1377112975-project-member] Expecting reply to msg e13672a197b44271b15c6653d1edfd55 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 863.647597] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e13672a197b44271b15c6653d1edfd55 [ 863.659043] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9b1ac097-bfad-47de-bf7d-b61097337ef8 tempest-ServerShowV254Test-1377112975 tempest-ServerShowV254Test-1377112975-project-member] Lock "b176b001-3c32-439a-b6cd-9b608a0ac623" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 219.173s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 863.659711] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4b0cf67f-82f7-4586-bdc6-38b2400e51c7 tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Expecting reply to msg 9b652dfa4b2b4ed9905e7c49d04b26bc in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 863.668337] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9b652dfa4b2b4ed9905e7c49d04b26bc [ 863.668865] env[61649]: DEBUG nova.compute.manager [None req-4b0cf67f-82f7-4586-bdc6-38b2400e51c7 tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] [instance: 95dad1e2-74d2-478f-8095-23a26770e27f] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 863.670799] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4b0cf67f-82f7-4586-bdc6-38b2400e51c7 tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Expecting reply to msg bd99e262ee6949cb918148d47e6910ac in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 863.694651] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bd99e262ee6949cb918148d47e6910ac [ 863.695355] env[61649]: DEBUG nova.compute.manager [None req-4b0cf67f-82f7-4586-bdc6-38b2400e51c7 tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] [instance: 95dad1e2-74d2-478f-8095-23a26770e27f] Instance disappeared before build. 
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 863.695772] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4b0cf67f-82f7-4586-bdc6-38b2400e51c7 tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Expecting reply to msg 3e3548e2bdd942fba7ee5d87dcda2185 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 863.708132] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3e3548e2bdd942fba7ee5d87dcda2185 [ 863.724529] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4b0cf67f-82f7-4586-bdc6-38b2400e51c7 tempest-DeleteServersAdminTestJSON-500581992 tempest-DeleteServersAdminTestJSON-500581992-project-member] Lock "95dad1e2-74d2-478f-8095-23a26770e27f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 199.384s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 863.725191] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg dce2fbda06c44ece87ddafc7784f19b9 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 863.734350] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dce2fbda06c44ece87ddafc7784f19b9 [ 863.734878] env[61649]: DEBUG nova.compute.manager [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 863.741031] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg 62be5e96c7514f8fb11e86f6b59daba9 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 863.776500] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 62be5e96c7514f8fb11e86f6b59daba9 [ 863.791968] env[61649]: DEBUG oslo_concurrency.lockutils [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 863.792333] env[61649]: DEBUG oslo_concurrency.lockutils [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 863.793925] env[61649]: INFO nova.compute.claims [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 863.795779] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None 
req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg 33bd88198686487b8c85d8264689dc31 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 863.828356] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 33bd88198686487b8c85d8264689dc31 [ 863.830314] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg 54d3dfe1446f4c3d90a5f78e59489cab in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 863.838037] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 54d3dfe1446f4c3d90a5f78e59489cab [ 864.134025] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84755558-4a5d-43ae-8dc3-3684d53bb79a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.142654] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd38a3ac-aee4-4d16-8f41-47e2ee49cfc8 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.180797] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86894fd0-3fe6-4a06-baa5-39996cbbf137 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.189065] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55169b78-2f35-4c77-b6c7-f6add972afee {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.204998] env[61649]: DEBUG nova.compute.provider_tree [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 864.205560] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg f995ef693a064601b2d863cf928f18a6 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 864.213329] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f995ef693a064601b2d863cf928f18a6 [ 864.214240] env[61649]: DEBUG nova.scheduler.client.report [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
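The set_inventory_for_provider entry above is the raw data Placement schedules against: for each resource class, schedulable capacity is (total - reserved) * allocation_ratio, and max_unit caps what a single allocation may claim. A minimal sketch of that arithmetic, reusing the figures logged for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 (the capacity formula is standard Placement behaviour; the code is illustrative, not nova's):

    # Schedulable capacity per resource class, from the inventory logged above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'max_unit': 16,    'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'max_unit': 197,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
        print(f"{rc}: capacity={capacity}, single-allocation cap={inv['max_unit']}")

    # VCPU: capacity=192, single-allocation cap=16   (48 host vCPUs, 4x overcommit)
    # MEMORY_MB: capacity=196078, single-allocation cap=65530
    # DISK_GB: capacity=400, single-allocation cap=197

Each m1.nano build in this run claims {'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1} against those capacities, which is the per-instance allocation the resource tracker enumerates near the end of this section.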
[ 864.216570] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg 31d32e0c1dad442598f64bbc59bda581 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 864.227873] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 31d32e0c1dad442598f64bbc59bda581 [ 864.228599] env[61649]: DEBUG oslo_concurrency.lockutils [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.436s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 864.229053] env[61649]: DEBUG nova.compute.manager [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Start building networks asynchronously for instance. {{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 864.230829] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg c26b9981edc04f5e8d5a6c40169bd5eb in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 864.261703] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c26b9981edc04f5e8d5a6c40169bd5eb [ 864.263387] env[61649]: DEBUG nova.compute.utils [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Using /dev/sd instead of None {{(pid=61649) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 864.263969] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg 910756da165e4f988f54bda8a7ef9e10 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 864.264967] env[61649]: DEBUG nova.compute.manager [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Allocating IP information in the background. {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 864.265133] env[61649]: DEBUG nova.network.neutron [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] allocate_for_instance() {{(pid=61649) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 864.272819] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 910756da165e4f988f54bda8a7ef9e10 [ 864.273337] env[61649]: DEBUG nova.compute.manager [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Start building block device mappings for instance.
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 864.274997] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg 98e016a580d44c839a3682c0f9f95e7b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 864.303371] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 98e016a580d44c839a3682c0f9f95e7b [ 864.306049] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg 9911a5ef8330484ebfb3c15e100f1b6d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 864.360404] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9911a5ef8330484ebfb3c15e100f1b6d [ 864.361716] env[61649]: DEBUG nova.compute.manager [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Start spawning the instance on the hypervisor. {{(pid=61649) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 864.392365] env[61649]: DEBUG nova.virt.hardware [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-04-02T10:03:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-04-02T10:03:42Z,direct_url=,disk_format='vmdk',id=d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d9c1bd4c77004c3cb8e42232cad1896c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-04-02T10:03:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 864.392610] env[61649]: DEBUG nova.virt.hardware [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Flavor limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 864.392766] env[61649]: DEBUG nova.virt.hardware [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Image limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 864.392946] env[61649]: DEBUG nova.virt.hardware [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Flavor pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 864.393091] env[61649]: DEBUG nova.virt.hardware [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 
tempest-AttachInterfacesTestJSON-1871132450-project-member] Image pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 864.393234] env[61649]: DEBUG nova.virt.hardware [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 864.393441] env[61649]: DEBUG nova.virt.hardware [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 864.393598] env[61649]: DEBUG nova.virt.hardware [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 864.393801] env[61649]: DEBUG nova.virt.hardware [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Got 1 possible topologies {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 864.393988] env[61649]: DEBUG nova.virt.hardware [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 864.394162] env[61649]: DEBUG nova.virt.hardware [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 864.395015] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08b03984-00e6-4b5d-9e31-454f5ec154a5 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.403528] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc4476ee-d83c-4cd9-9dc6-c5a74899238e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.423504] env[61649]: DEBUG nova.policy [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '73f73ecceb844895bbdf126203901353', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ca010f93a4ee40db98cd5885b47d21d2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} 
{{(pid=61649) authorize /opt/stack/nova/nova/policy.py:203}} [ 865.067929] env[61649]: DEBUG nova.network.neutron [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Successfully created port: 18e04808-0d6b-420a-938c-62437e2fa0bd {{(pid=61649) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 866.334610] env[61649]: DEBUG nova.network.neutron [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Successfully updated port: 18e04808-0d6b-420a-938c-62437e2fa0bd {{(pid=61649) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 866.334610] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg 36b017a722ff4b7db3d7f28dcfdf0dee in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 866.343628] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 36b017a722ff4b7db3d7f28dcfdf0dee [ 866.343628] env[61649]: DEBUG oslo_concurrency.lockutils [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Acquiring lock "refresh_cache-29f84900-0805-4ab2-af4d-bd7be2ac94d3" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 866.343628] env[61649]: DEBUG oslo_concurrency.lockutils [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Acquired lock "refresh_cache-29f84900-0805-4ab2-af4d-bd7be2ac94d3" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.343628] env[61649]: DEBUG nova.network.neutron [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Building network info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 866.343628] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg cd8f9b2e5d0f42d08c6a88f86e23831a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 866.350070] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cd8f9b2e5d0f42d08c6a88f86e23831a [ 866.375201] env[61649]: DEBUG nova.compute.manager [req-0036acbd-a9c2-416c-80d9-1362cab79737 req-e3222cc5-6dee-4f7c-a3da-08cd49b25e95 service nova] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Received event network-vif-plugged-18e04808-0d6b-420a-938c-62437e2fa0bd {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 866.375481] env[61649]: DEBUG oslo_concurrency.lockutils [req-0036acbd-a9c2-416c-80d9-1362cab79737 req-e3222cc5-6dee-4f7c-a3da-08cd49b25e95 service nova] Acquiring lock "29f84900-0805-4ab2-af4d-bd7be2ac94d3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" 
{{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 866.375765] env[61649]: DEBUG oslo_concurrency.lockutils [req-0036acbd-a9c2-416c-80d9-1362cab79737 req-e3222cc5-6dee-4f7c-a3da-08cd49b25e95 service nova] Lock "29f84900-0805-4ab2-af4d-bd7be2ac94d3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 866.375995] env[61649]: DEBUG oslo_concurrency.lockutils [req-0036acbd-a9c2-416c-80d9-1362cab79737 req-e3222cc5-6dee-4f7c-a3da-08cd49b25e95 service nova] Lock "29f84900-0805-4ab2-af4d-bd7be2ac94d3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 866.376253] env[61649]: DEBUG nova.compute.manager [req-0036acbd-a9c2-416c-80d9-1362cab79737 req-e3222cc5-6dee-4f7c-a3da-08cd49b25e95 service nova] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] No waiting events found dispatching network-vif-plugged-18e04808-0d6b-420a-938c-62437e2fa0bd {{(pid=61649) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 866.376482] env[61649]: WARNING nova.compute.manager [req-0036acbd-a9c2-416c-80d9-1362cab79737 req-e3222cc5-6dee-4f7c-a3da-08cd49b25e95 service nova] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Received unexpected event network-vif-plugged-18e04808-0d6b-420a-938c-62437e2fa0bd for instance with vm_state building and task_state spawning. [ 866.477979] env[61649]: DEBUG nova.network.neutron [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Instance cache missing network info. 
{{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 866.704894] env[61649]: DEBUG nova.network.neutron [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Updating instance_info_cache with network_info: [{"id": "18e04808-0d6b-420a-938c-62437e2fa0bd", "address": "fa:16:3e:54:4e:4b", "network": {"id": "90f0d83b-388c-413d-b2b0-909a46293040", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1619441242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca010f93a4ee40db98cd5885b47d21d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "604056d6-6dd6-47fa-9eaa-6863a3a7c488", "external-id": "nsx-vlan-transportzone-287", "segmentation_id": 287, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18e04808-0d", "ovs_interfaceid": "18e04808-0d6b-420a-938c-62437e2fa0bd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 866.705443] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg 471ab37ffbc74c4c90774d74d2df3b33 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 866.719383] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 471ab37ffbc74c4c90774d74d2df3b33 [ 866.720279] env[61649]: DEBUG oslo_concurrency.lockutils [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Releasing lock "refresh_cache-29f84900-0805-4ab2-af4d-bd7be2ac94d3" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 866.720565] env[61649]: DEBUG nova.compute.manager [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Instance network_info: |[{"id": "18e04808-0d6b-420a-938c-62437e2fa0bd", "address": "fa:16:3e:54:4e:4b", "network": {"id": "90f0d83b-388c-413d-b2b0-909a46293040", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1619441242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca010f93a4ee40db98cd5885b47d21d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "604056d6-6dd6-47fa-9eaa-6863a3a7c488", "external-id": "nsx-vlan-transportzone-287", "segmentation_id": 287, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18e04808-0d", "ovs_interfaceid": "18e04808-0d6b-420a-938c-62437e2fa0bd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 866.721224] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:54:4e:4b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '604056d6-6dd6-47fa-9eaa-6863a3a7c488', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '18e04808-0d6b-420a-938c-62437e2fa0bd', 'vif_model': 'vmxnet3'}] {{(pid=61649) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 866.729898] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Creating folder: Project (ca010f93a4ee40db98cd5885b47d21d2). Parent ref: group-v51588. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 866.733282] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-258dca65-4e04-4226-96b8-78c60b6ea864 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.747693] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Created folder: Project (ca010f93a4ee40db98cd5885b47d21d2) in parent group-v51588. [ 866.747898] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Creating folder: Instances. Parent ref: group-v51632. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 866.748200] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-15d89646-5ae0-452c-964d-79eed057f495 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.757910] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Created folder: Instances in parent group-v51632. [ 866.757910] env[61649]: DEBUG oslo.service.loopingcall [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 866.757910] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Creating VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 866.758072] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b38d0394-ab54-4093-ba2a-dc73d7c610cb {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.779567] env[61649]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 866.779567] env[61649]: value = "task-158146" [ 866.779567] env[61649]: _type = "Task" [ 866.779567] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.787123] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158146, 'name': CreateVM_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.288678] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158146, 'name': CreateVM_Task} progress is 25%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.789226] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158146, 'name': CreateVM_Task, 'duration_secs': 0.63205} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.789471] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Created VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 867.789983] env[61649]: DEBUG oslo_concurrency.lockutils [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 867.790211] env[61649]: DEBUG oslo_concurrency.lockutils [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.790660] env[61649]: DEBUG oslo_concurrency.lockutils [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 867.791028] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-80bbcbf9-f1c4-4c5e-87e2-f79587897731 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.796033] env[61649]: DEBUG oslo_vmware.api [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Waiting for the task: (returnval){ [ 
867.796033] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52a12406-f85a-02e1-cbed-1e4cfb327777" [ 867.796033] env[61649]: _type = "Task" [ 867.796033] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.803659] env[61649]: DEBUG oslo_vmware.api [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52a12406-f85a-02e1-cbed-1e4cfb327777, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.306341] env[61649]: DEBUG oslo_concurrency.lockutils [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 868.306596] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Processing image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 868.306807] env[61649]: DEBUG oslo_concurrency.lockutils [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 868.602533] env[61649]: DEBUG nova.compute.manager [req-043083db-3417-4dc3-a934-cf0954a47fcd req-16714919-9beb-465b-829e-c7eb8fb24a41 service nova] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Received event network-changed-18e04808-0d6b-420a-938c-62437e2fa0bd {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 868.602533] env[61649]: DEBUG nova.compute.manager [req-043083db-3417-4dc3-a934-cf0954a47fcd req-16714919-9beb-465b-829e-c7eb8fb24a41 service nova] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Refreshing instance network info cache due to event network-changed-18e04808-0d6b-420a-938c-62437e2fa0bd. 
{{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 868.602533] env[61649]: DEBUG oslo_concurrency.lockutils [req-043083db-3417-4dc3-a934-cf0954a47fcd req-16714919-9beb-465b-829e-c7eb8fb24a41 service nova] Acquiring lock "refresh_cache-29f84900-0805-4ab2-af4d-bd7be2ac94d3" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 868.602533] env[61649]: DEBUG oslo_concurrency.lockutils [req-043083db-3417-4dc3-a934-cf0954a47fcd req-16714919-9beb-465b-829e-c7eb8fb24a41 service nova] Acquired lock "refresh_cache-29f84900-0805-4ab2-af4d-bd7be2ac94d3" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.602533] env[61649]: DEBUG nova.network.neutron [req-043083db-3417-4dc3-a934-cf0954a47fcd req-16714919-9beb-465b-829e-c7eb8fb24a41 service nova] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Refreshing network info cache for port 18e04808-0d6b-420a-938c-62437e2fa0bd {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 868.603197] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-043083db-3417-4dc3-a934-cf0954a47fcd req-16714919-9beb-465b-829e-c7eb8fb24a41 service nova] Expecting reply to msg 0ce7b2e7a40b48c19fb5bb010b5d41d3 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 868.609806] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0ce7b2e7a40b48c19fb5bb010b5d41d3 [ 868.978575] env[61649]: DEBUG nova.network.neutron [req-043083db-3417-4dc3-a934-cf0954a47fcd req-16714919-9beb-465b-829e-c7eb8fb24a41 service nova] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Updated VIF entry in instance network info cache for port 18e04808-0d6b-420a-938c-62437e2fa0bd. 
{{(pid=61649) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 868.978938] env[61649]: DEBUG nova.network.neutron [req-043083db-3417-4dc3-a934-cf0954a47fcd req-16714919-9beb-465b-829e-c7eb8fb24a41 service nova] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Updating instance_info_cache with network_info: [{"id": "18e04808-0d6b-420a-938c-62437e2fa0bd", "address": "fa:16:3e:54:4e:4b", "network": {"id": "90f0d83b-388c-413d-b2b0-909a46293040", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1619441242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca010f93a4ee40db98cd5885b47d21d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "604056d6-6dd6-47fa-9eaa-6863a3a7c488", "external-id": "nsx-vlan-transportzone-287", "segmentation_id": 287, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18e04808-0d", "ovs_interfaceid": "18e04808-0d6b-420a-938c-62437e2fa0bd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 868.979527] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-043083db-3417-4dc3-a934-cf0954a47fcd req-16714919-9beb-465b-829e-c7eb8fb24a41 service nova] Expecting reply to msg 808d6e5c1908463096d3af76248958e2 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 868.987533] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 808d6e5c1908463096d3af76248958e2 [ 868.988201] env[61649]: DEBUG oslo_concurrency.lockutils [req-043083db-3417-4dc3-a934-cf0954a47fcd req-16714919-9beb-465b-829e-c7eb8fb24a41 service nova] Releasing lock "refresh_cache-29f84900-0805-4ab2-af4d-bd7be2ac94d3" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 870.023363] env[61649]: DEBUG oslo_concurrency.lockutils [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Acquiring lock "9cdd96c2-2837-4cb3-855c-ecad727dd5d4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 870.023634] env[61649]: DEBUG oslo_concurrency.lockutils [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Lock "9cdd96c2-2837-4cb3-855c-ecad727dd5d4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 883.924208] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61649) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 883.924875] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg b520a5d15c2b42aaa5ed5b1c008c901b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 883.943507] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b520a5d15c2b42aaa5ed5b1c008c901b [ 884.928613] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 884.928889] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Starting heal instance info cache {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 884.928935] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Rebuilding the list of instances to heal {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 884.929514] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg e8cb64dc26254c688492410edd693f0e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 884.947983] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e8cb64dc26254c688492410edd693f0e [ 884.950879] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 884.951023] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 884.951155] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 00931111-13a1-447d-a401-943221badd59] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 884.951282] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 884.951442] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 884.951641] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Skipping network cache update for instance because it is Building. 
{{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}}
[ 884.951783] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}}
[ 884.951907] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}}
[ 884.952124] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}}
[ 884.952278] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}}
[ 884.952399] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Didn't find any instances for network info cache update. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}}
[ 885.929298] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 886.929616] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 886.930022] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 886.930417] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 886.930514] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61649) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}}
[ 886.930738] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 886.931040] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg b4d33e24a94444baa597bb29e28f17c1 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 886.939782] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b4d33e24a94444baa597bb29e28f17c1
[ 886.941490] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 886.941490] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 886.941627] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 886.941776] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61649) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 886.942872] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-046c9fa6-9cf7-43d4-b8c8-933e5a4447de {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 886.950701] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a92ca2a-99bd-4d82-8842-b74dfdf001b2 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 886.967757] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c125ea0e-8a1c-458e-a0b0-b93e9793171f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 886.989611] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2036e1e-3fe7-4a59-9bb9-ce0c581e420e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 887.023686] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181796MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61649) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 887.023831] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 887.024046] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 887.024896] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 40213fa0a04445c9b88470aeca863245 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 887.060014] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 40213fa0a04445c9b88470aeca863245
[ 887.063201] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 5d1dae60a794426d87d840536aefb5d0 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 887.072849] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5d1dae60a794426d87d840536aefb5d0
[ 887.094642] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance eb0c04e3-1234-445c-bfa6-e031dd0b89d3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 887.094642] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance fd0ac9db-adc2-46f2-93ff-0b7e299534a7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 887.094642] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 00931111-13a1-447d-a401-943221badd59 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 887.094642] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 99f9912a-edf0-40f5-a7ce-55767081705b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 887.094909] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 7f9f2074-6822-4d9d-9791-4bebc7e55862 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 887.094909] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance bf8c692f-6510-4548-aedd-0e1792512e20 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 887.094909] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance e5fe92cf-e150-419f-a164-a98a9d24dd8c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 887.094909] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 95426048-d403-4dad-9ad7-b76de655a319 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 887.095055] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance a0db1e96-4ca4-4fed-b86b-d8457f3570a9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 887.095055] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 29f84900-0805-4ab2-af4d-bd7be2ac94d3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 887.095055] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg b90920785bad4c48a9c24c73b5fefe71 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 887.107377] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b90920785bad4c48a9c24c73b5fefe71
[ 887.107377] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance bf2399eb-b2df-43b3-bddd-48692825c40a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 887.107377] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 7090499daeb64f2f851f788bcb4a6774 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 887.122898] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7090499daeb64f2f851f788bcb4a6774
[ 887.122898] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 6ab197e9-3e38-4b37-b625-c30b6977261a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 887.122898] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg dd748cbdc2714502a4df33f7dba5c072 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 887.133898] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dd748cbdc2714502a4df33f7dba5c072
[ 887.133898] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance d7a1c8c3-1694-4704-8414-098af751c05e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 887.133898] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 19a2d18f59264ae09abcdb787901bbac in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 887.145752] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 19a2d18f59264ae09abcdb787901bbac
[ 887.145752] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance e4d8cb96-182d-4b77-a8ac-dfd1bf52d484 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 887.145752] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg faa944c526884e5c997ed9457034686a in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 887.156493] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg faa944c526884e5c997ed9457034686a
[ 887.156493] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 10da1abe-1c95-44b8-a10d-ce618625b69b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 887.156493] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 5c57331db636475aa3386cf5631341ef in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 887.174039] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5c57331db636475aa3386cf5631341ef
[ 887.174039] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance a086f03c-c993-4e1a-8a3e-efa40bb8b8bd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 887.174039] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 271daa95d84c496a98d1b3a42926058b in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 887.183354] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 271daa95d84c496a98d1b3a42926058b
[ 887.184289] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance c9fe1bfe-e813-43e9-9668-b813416ee27b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 887.184966] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 56b005339b554d0d85461bf48d029197 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 887.194510] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 56b005339b554d0d85461bf48d029197
[ 887.195357] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 7e77db18-077d-4665-ad90-c4e5f470716c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 887.195992] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 3f70d4cc3ae1447d9fc1a88d5713965d in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 887.205357] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3f70d4cc3ae1447d9fc1a88d5713965d
[ 887.206514] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 6efdc98b-e32d-4313-b13f-95c3d4911823 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 887.207169] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg e462d3817e2e4b9aaf15e7f2af4b227a in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 887.219239] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e462d3817e2e4b9aaf15e7f2af4b227a
[ 887.220156] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance f51cfd74-25e5-4077-9b43-8cb38fe051f8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 887.220835] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 07d69bc94b19429dbd0ccd5fd213174b in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 887.232431] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 07d69bc94b19429dbd0ccd5fd213174b
[ 887.233247] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance db7db1c9-6716-4591-b669-b85dd595a3e9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 887.234096] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 383dd7527e7244fe828d6508b021e52d in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 887.249198] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 383dd7527e7244fe828d6508b021e52d
[ 887.250101] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 899db5f5-4963-4f7a-97d2-9c2dfd7a6981 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 887.250982] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 45a54457c73e44349be1fd4e9f591277 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 887.270647] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 45a54457c73e44349be1fd4e9f591277
[ 887.271672] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 7de3e5cd-94ee-4a80-8baf-17ccfed9d8c8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 887.272339] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg fd3726c5e9e2481a9e276ca5d45c8acd in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 887.283599] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fd3726c5e9e2481a9e276ca5d45c8acd
[ 887.284535] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 9cdd96c2-2837-4cb3-855c-ecad727dd5d4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 887.284933] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 887.285225] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 887.579818] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c041a88e-4702-48ff-aac9-1a202210bbde {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 887.587545] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22ff159f-6a8c-4734-a6dd-b08a54bb8df5 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 887.618558] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe2e4dd2-b300-4452-ba6e-420c959aa595 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 887.626788] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22ef623e-49bf-4e80-9411-56723bf216b7 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 887.640125] env[61649]: DEBUG nova.compute.provider_tree [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 887.640635] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg d5dcc0cc38f847ee83ddcf459258ee15 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 887.648486] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d5dcc0cc38f847ee83ddcf459258ee15
[ 887.649442] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 887.651773] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 6019e69c9d76462893218dfd1c9e2a81 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 887.664659] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6019e69c9d76462893218dfd1c9e2a81
[ 887.665379] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61649) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 887.665555] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.642s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 888.659723] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 888.660115] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 888.929386] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 890.982436] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg cc203160d04c45b190a99fb473a09841 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 890.997223] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cc203160d04c45b190a99fb473a09841
[ 891.721140] env[61649]: DEBUG oslo_concurrency.lockutils [None req-77d8f494-4ca1-4293-a5c4-ebde1bfc3c98 tempest-ServerActionsV293TestJSON-761580856 tempest-ServerActionsV293TestJSON-761580856-project-member] Acquiring lock "3e1127c5-876f-47b9-b652-7a558711a1a7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 891.721381] env[61649]: DEBUG oslo_concurrency.lockutils [None req-77d8f494-4ca1-4293-a5c4-ebde1bfc3c98 tempest-ServerActionsV293TestJSON-761580856 tempest-ServerActionsV293TestJSON-761580856-project-member] Lock "3e1127c5-876f-47b9-b652-7a558711a1a7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 909.935446] env[61649]: WARNING oslo_vmware.rw_handles [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 909.935446] env[61649]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 909.935446] env[61649]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 909.935446] env[61649]: ERROR oslo_vmware.rw_handles self._conn.getresponse()
[ 909.935446] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 909.935446] env[61649]: ERROR oslo_vmware.rw_handles response.begin()
[ 909.935446] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 909.935446] env[61649]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status()
[ 909.935446] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 909.935446] env[61649]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without"
[ 909.935446] env[61649]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 909.935446] env[61649]: ERROR oslo_vmware.rw_handles
[ 909.935446] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Downloaded image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to vmware_temp/6ddbeb2f-635d-407a-99d9-6b9928176b75/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 909.937130] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Caching image {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 909.937400] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Copying Virtual Disk [datastore1] vmware_temp/6ddbeb2f-635d-407a-99d9-6b9928176b75/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk to [datastore1] vmware_temp/6ddbeb2f-635d-407a-99d9-6b9928176b75/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk {{(pid=61649) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 909.937669] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5d5862b7-def4-4af2-8c12-2e2d84bfabb5 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 909.945110] env[61649]: DEBUG oslo_vmware.api [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Waiting for the task: (returnval){
[ 909.945110] env[61649]: value = "task-158158"
[ 909.945110] env[61649]: _type = "Task"
[ 909.945110] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 909.953615] env[61649]: DEBUG oslo_vmware.api [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Task: {'id': task-158158, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 910.455815] env[61649]: DEBUG oslo_vmware.exceptions [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Fault InvalidArgument not matched. {{(pid=61649) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 910.456120] env[61649]: DEBUG oslo_concurrency.lockutils [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 910.456669] env[61649]: ERROR nova.compute.manager [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 910.456669] env[61649]: Faults: ['InvalidArgument']
[ 910.456669] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Traceback (most recent call last):
[ 910.456669] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources
[ 910.456669] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] yield resources
[ 910.456669] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 910.456669] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] self.driver.spawn(context, instance, image_meta,
[ 910.456669] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 910.456669] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 910.456669] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 910.456669] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] self._fetch_image_if_missing(context, vi)
[ 910.456669] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 910.457147] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] image_cache(vi, tmp_image_ds_loc)
[ 910.457147] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 910.457147] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] vm_util.copy_virtual_disk(
[ 910.457147] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 910.457147] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] session._wait_for_task(vmdk_copy_task)
[ 910.457147] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 910.457147] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] return self.wait_for_task(task_ref)
[ 910.457147] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 910.457147] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] return evt.wait()
[ 910.457147] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 910.457147] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] result = hub.switch()
[ 910.457147] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 910.457147] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] return self.greenlet.switch()
[ 910.457672] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 910.457672] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] self.f(*self.args, **self.kw)
[ 910.457672] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 910.457672] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] raise exceptions.translate_fault(task_info.error)
[ 910.457672] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 910.457672] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Faults: ['InvalidArgument']
[ 910.457672] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3]
[ 910.457672] env[61649]: INFO nova.compute.manager [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Terminating instance
[ 910.458752] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 910.458829] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 910.459031] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e4e00994-0999-44bb-9a00-66b3715afdb6 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 910.461519] env[61649]: DEBUG nova.compute.manager [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 910.461710] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 910.462430] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2090e2ea-98c1-4937-bfb7-ed55c5e131b2 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 910.469044] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Unregistering the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 910.469281] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-81018816-76d5-4203-b0f7-266e7991f5d1 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 910.471387] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 910.471564] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61649) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 910.472493] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c77fe8b6-5820-455b-a6cb-3d6482363f18 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 910.478355] env[61649]: DEBUG oslo_vmware.api [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Waiting for the task: (returnval){
[ 910.478355] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]527c5809-20f6-43ae-2b7c-bc0b4859efd0"
[ 910.478355] env[61649]: _type = "Task"
[ 910.478355] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 910.485496] env[61649]: DEBUG oslo_vmware.api [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]527c5809-20f6-43ae-2b7c-bc0b4859efd0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 910.540317] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Unregistered the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 910.540584] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Deleting contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 910.540801] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Deleting the datastore file [datastore1] eb0c04e3-1234-445c-bfa6-e031dd0b89d3 {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 910.541059] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0522ad57-5cf2-47db-a59a-d32ad20ca6f4 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 910.547437] env[61649]: DEBUG oslo_vmware.api [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Waiting for the task: (returnval){
[ 910.547437] env[61649]: value = "task-158160"
[ 910.547437] env[61649]: _type = "Task"
[ 910.547437] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 910.555855] env[61649]: DEBUG oslo_vmware.api [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Task: {'id': task-158160, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 910.987725] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Preparing fetch location {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 910.988053] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Creating directory with path [datastore1] vmware_temp/828af11d-f050-47d3-aa1b-c52041f96ce8/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 910.988242] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-153cab74-f74b-4a2d-babd-86d119b916a2 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 910.998778] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Created directory with path [datastore1] vmware_temp/828af11d-f050-47d3-aa1b-c52041f96ce8/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 910.998976] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Fetch image to [datastore1] vmware_temp/828af11d-f050-47d3-aa1b-c52041f96ce8/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 910.999150] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to [datastore1] vmware_temp/828af11d-f050-47d3-aa1b-c52041f96ce8/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 910.999895] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9027ba3e-b0d4-4814-987c-304f8dbc7b27 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 911.006366] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-426412bf-b062-4b48-bce2-34d296ec98bf {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 911.015092] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f9f7dbf-e12a-4330-bc39-aed09650d088 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 911.046083] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e74397f6-37f7-4e1e-a545-a821eb285515 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 911.056776] env[61649]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-19aa70bd-5674-4c9e-9850-b7b3ea9c2219 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 911.058448] env[61649]: DEBUG oslo_vmware.api [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Task: {'id': task-158160, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.061234} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 911.058704] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Deleted the datastore file {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 911.058910] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Deleted contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}}
[ 911.059052] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 911.059254] env[61649]: INFO nova.compute.manager [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Took 0.60 seconds to destroy the instance on the hypervisor.
[ 911.061333] env[61649]: DEBUG nova.compute.claims [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Aborting claim: {{(pid=61649) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 911.061503] env[61649]: DEBUG oslo_concurrency.lockutils [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 911.061794] env[61649]: DEBUG oslo_concurrency.lockutils [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 911.063604] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Expecting reply to msg 3b073b1de35e4b41ba93db7356480fe6 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 911.084033] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 911.098123] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3b073b1de35e4b41ba93db7356480fe6 [ 911.142103] env[61649]: DEBUG oslo_vmware.rw_handles [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/828af11d-f050-47d3-aa1b-c52041f96ce8/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 911.205625] env[61649]: DEBUG oslo_vmware.rw_handles [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Completed reading data from the image iterator. {{(pid=61649) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 911.205825] env[61649]: DEBUG oslo_vmware.rw_handles [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/828af11d-f050-47d3-aa1b-c52041f96ce8/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61649) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 911.418916] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2981d3f6-6654-4484-8fa0-d1a1068290fd {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.425727] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a87c40d-29da-42b1-ab6d-3582604c3b0e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.455109] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0979974-ea58-481f-8fbe-b100d6bd94da {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.462607] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f440e34-0705-483b-b785-c93e89747588 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.477983] env[61649]: DEBUG nova.compute.provider_tree [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 911.478488] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Expecting reply to msg 587c2303890042ef9a95965e57b791d2 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 911.486260] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 587c2303890042ef9a95965e57b791d2 [ 911.488028] env[61649]: DEBUG nova.scheduler.client.report [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 911.489523] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Expecting reply to msg 41826bbb1a8f48db876b545e2b180353 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 911.500514] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 41826bbb1a8f48db876b545e2b180353 [ 911.501270] env[61649]: DEBUG oslo_concurrency.lockutils [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: 
held 0.439s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 911.501809] env[61649]: ERROR nova.compute.manager [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 911.501809] env[61649]: Faults: ['InvalidArgument'] [ 911.501809] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Traceback (most recent call last): [ 911.501809] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 911.501809] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] self.driver.spawn(context, instance, image_meta, [ 911.501809] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 911.501809] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 911.501809] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 911.501809] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] self._fetch_image_if_missing(context, vi) [ 911.501809] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 911.501809] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] image_cache(vi, tmp_image_ds_loc) [ 911.501809] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 911.502071] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] vm_util.copy_virtual_disk( [ 911.502071] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 911.502071] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] session._wait_for_task(vmdk_copy_task) [ 911.502071] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 911.502071] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] return self.wait_for_task(task_ref) [ 911.502071] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 911.502071] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] return evt.wait() [ 911.502071] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 911.502071] env[61649]: ERROR 
nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] result = hub.switch() [ 911.502071] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 911.502071] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] return self.greenlet.switch() [ 911.502071] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 911.502071] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] self.f(*self.args, **self.kw) [ 911.502325] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 911.502325] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] raise exceptions.translate_fault(task_info.error) [ 911.502325] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 911.502325] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Faults: ['InvalidArgument'] [ 911.502325] env[61649]: ERROR nova.compute.manager [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] [ 911.502594] env[61649]: DEBUG nova.compute.utils [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] VimFaultException {{(pid=61649) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 911.504817] env[61649]: DEBUG nova.compute.manager [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Build of instance eb0c04e3-1234-445c-bfa6-e031dd0b89d3 was re-scheduled: A specified parameter was not correct: fileType [ 911.504817] env[61649]: Faults: ['InvalidArgument'] {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 911.505208] env[61649]: DEBUG nova.compute.manager [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Unplugging VIFs for instance {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 911.505387] env[61649]: DEBUG nova.compute.manager [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 911.505543] env[61649]: DEBUG nova.compute.manager [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 911.505705] env[61649]: DEBUG nova.network.neutron [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 912.023889] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Expecting reply to msg 3bd8dac43a6344db84c13ce7eee44ead in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 912.037707] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3bd8dac43a6344db84c13ce7eee44ead [ 912.038269] env[61649]: DEBUG nova.network.neutron [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 912.038744] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Expecting reply to msg 124e31b5dfd249c1ad886ef57f318c65 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 912.051578] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 124e31b5dfd249c1ad886ef57f318c65 [ 912.052248] env[61649]: INFO nova.compute.manager [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Took 0.55 seconds to deallocate network for instance. 
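The traceback above shows where the build dies: the spawn path's image-cache step (`_fetch_image_if_missing` → `_cache_sparse_image` → `vm_util.copy_virtual_disk`) starts a `CopyVirtualDisk_Task` on vCenter and then blocks in oslo.vmware's task poller, which re-raises the server-side `InvalidArgument` fault on `fileType` as a `VimFaultException`. A minimal sketch of that copy-and-wait pattern, assuming an established oslo.vmware `VMwareAPISession`; this is illustrative, not the actual nova source:

```python
# Sketch of the copy-and-wait pattern from the traceback above.
# Assumes an established oslo.vmware VMwareAPISession ('session') and its
# Vim client ('vim'); simplified from nova.virt.vmwareapi.vm_util.
from oslo_vmware import exceptions


def copy_sparse_image(session, vim, source_path, dest_path):
    """Copy a VMDK on the datastore and wait for the vCenter task."""
    task = session.invoke_api(
        vim, 'CopyVirtualDisk_Task',
        vim.service_content.virtualDiskManager,
        sourceName=source_path, destName=dest_path)
    try:
        # wait_for_task polls task.info and raises once the task reports
        # an error state (oslo_vmware/api.py:_poll_task in the traceback).
        return session.wait_for_task(task)
    except exceptions.VimFaultException as err:
        # err.fault_list carries the fault names, e.g. ['InvalidArgument'];
        # here vCenter rejected the 'fileType' in the sparse VMDK descriptor.
        raise
```

Once the exception reaches `_do_build_and_run_instance`, nova aborts the resource claim and deallocates the instance's network, which is exactly the sequence the surrounding entries record.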
[ 912.053927] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Expecting reply to msg 0f8ef2f47de047d3bf7be039570aa77f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 912.110133] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0f8ef2f47de047d3bf7be039570aa77f [ 912.112621] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Expecting reply to msg 88b75f38fa1941468927ddba25bb2cdc in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 912.144950] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 88b75f38fa1941468927ddba25bb2cdc [ 912.160682] env[61649]: INFO nova.scheduler.client.report [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Deleted allocations for instance eb0c04e3-1234-445c-bfa6-e031dd0b89d3 [ 912.166644] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Expecting reply to msg 237567d2ceec43d2ab4b5263e94b3eba in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 912.176043] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 237567d2ceec43d2ab4b5263e94b3eba [ 912.176550] env[61649]: DEBUG oslo_concurrency.lockutils [None req-1fe10865-d57d-4ea4-bf1d-70c591046d57 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Lock "eb0c04e3-1234-445c-bfa6-e031dd0b89d3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 337.375s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 912.177083] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Expecting reply to msg 1539014f3c484a1c9926e19321d404a3 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 912.177782] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5ae5c19b-3a9a-41b1-af66-e4c59ae16f42 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Lock "eb0c04e3-1234-445c-bfa6-e031dd0b89d3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 139.558s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 912.177995] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5ae5c19b-3a9a-41b1-af66-e4c59ae16f42 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Acquiring lock "eb0c04e3-1234-445c-bfa6-e031dd0b89d3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 912.178203] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5ae5c19b-3a9a-41b1-af66-e4c59ae16f42 tempest-ServerExternalEventsTest-216908951 
tempest-ServerExternalEventsTest-216908951-project-member] Lock "eb0c04e3-1234-445c-bfa6-e031dd0b89d3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 912.178370] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5ae5c19b-3a9a-41b1-af66-e4c59ae16f42 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Lock "eb0c04e3-1234-445c-bfa6-e031dd0b89d3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 912.180456] env[61649]: INFO nova.compute.manager [None req-5ae5c19b-3a9a-41b1-af66-e4c59ae16f42 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Terminating instance [ 912.182236] env[61649]: DEBUG nova.compute.manager [None req-5ae5c19b-3a9a-41b1-af66-e4c59ae16f42 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 912.182444] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-5ae5c19b-3a9a-41b1-af66-e4c59ae16f42 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 912.182908] env[61649]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-81b345e6-0548-44c4-90f3-049ab79993b6 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.192516] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e09bb3ce-7ee6-4cb2-bb85-64294ae89760 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.206963] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1539014f3c484a1c9926e19321d404a3 [ 912.207589] env[61649]: DEBUG nova.compute.manager [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 912.210998] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Expecting reply to msg 91d66903c6984b81bfc1df9048c05db1 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 912.229732] env[61649]: WARNING nova.virt.vmwareapi.vmops [None req-5ae5c19b-3a9a-41b1-af66-e4c59ae16f42 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance eb0c04e3-1234-445c-bfa6-e031dd0b89d3 could not be found. 
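The lock bookkeeping above ("waited 139.558s", "held 0.000s") comes from oslo.concurrency's `lockutils` decorator: build and terminate are serialized on the instance UUID, so this delete request queued behind the 337-second build before it could start destroying the instance. A minimal runnable sketch of that named-lock pattern (nova reaches it through its own `utils.synchronized` wrapper; `shutdown_and_deallocate` below is a hypothetical stand-in for the real teardown steps):

```python
# Sketch of the per-instance named lock behind the acquire/release lines
# above; lockutils logs the 'acquired by ... waited Ns' / '"released" by
# ... held Ns' messages from inside the decorator.
from oslo_concurrency import lockutils


def shutdown_and_deallocate(instance_uuid):
    # Hypothetical stand-in for the real teardown (destroy on the
    # hypervisor, deallocate network, clear pending instance events).
    print('tearing down %s' % instance_uuid)


def terminate_instance(instance_uuid):
    @lockutils.synchronized(instance_uuid)
    def do_terminate_instance():
        # Only one caller per instance UUID gets past the decorator; in
        # this log the delete waited ~139s for the build to release the
        # same lock, then held it for 0.216s.
        shutdown_and_deallocate(instance_uuid)

    do_terminate_instance()


terminate_instance('eb0c04e3-1234-445c-bfa6-e031dd0b89d3')
```

The nested `do_terminate_instance` mirrors how the lock target is named in the log lines themselves.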
[ 912.229732] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-5ae5c19b-3a9a-41b1-af66-e4c59ae16f42 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 912.229732] env[61649]: INFO nova.compute.manager [None req-5ae5c19b-3a9a-41b1-af66-e4c59ae16f42 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Took 0.05 seconds to destroy the instance on the hypervisor. [ 912.229732] env[61649]: DEBUG oslo.service.loopingcall [None req-5ae5c19b-3a9a-41b1-af66-e4c59ae16f42 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 912.229897] env[61649]: DEBUG nova.compute.manager [-] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 912.230003] env[61649]: DEBUG nova.network.neutron [-] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 912.244811] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 91d66903c6984b81bfc1df9048c05db1 [ 912.256924] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 162033d74c404924b764a53adcfe8700 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 912.260357] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 912.260623] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 912.262061] env[61649]: INFO nova.compute.claims [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 912.263581] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Expecting reply to msg 7eee38e76bff4e4db10d9ddb630636d2 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 912.264970] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 162033d74c404924b764a53adcfe8700 [ 912.265357] env[61649]: DEBUG nova.network.neutron [-] [instance: 
eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 912.265678] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg dca7920190244363bc8379622b389a23 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 912.274211] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dca7920190244363bc8379622b389a23 [ 912.274436] env[61649]: INFO nova.compute.manager [-] [instance: eb0c04e3-1234-445c-bfa6-e031dd0b89d3] Took 0.04 seconds to deallocate network for instance. [ 912.278051] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5ae5c19b-3a9a-41b1-af66-e4c59ae16f42 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Expecting reply to msg f875cf69f2484ca4b2b7080984b968c7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 912.313617] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7eee38e76bff4e4db10d9ddb630636d2 [ 912.315423] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Expecting reply to msg 708869885ba34b07be09ef6a008fcecf in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 912.318086] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f875cf69f2484ca4b2b7080984b968c7 [ 912.322845] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 708869885ba34b07be09ef6a008fcecf [ 912.332196] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5ae5c19b-3a9a-41b1-af66-e4c59ae16f42 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Expecting reply to msg 165489c0c8214ecebdfe19a0eab23e7f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 912.390672] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 165489c0c8214ecebdfe19a0eab23e7f [ 912.393534] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5ae5c19b-3a9a-41b1-af66-e4c59ae16f42 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Lock "eb0c04e3-1234-445c-bfa6-e031dd0b89d3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.216s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 912.393876] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5ae5c19b-3a9a-41b1-af66-e4c59ae16f42 tempest-ServerExternalEventsTest-216908951 tempest-ServerExternalEventsTest-216908951-project-member] Expecting reply to msg 870d41578cc14e43bc563180f719ecbc in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 912.404473] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 870d41578cc14e43bc563180f719ecbc [ 912.608697] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42b60381-45a8-46bc-97dc-c4fdbc362077 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.616278] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8dce4e1-2db3-4957-8a0f-f82e71063a39 {{(pid=61649) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.648469] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d2e3b6a-c510-41e3-920b-6028621696a6 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.658605] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-462e9c1a-1f8d-401b-9d07-41ad9c145372 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.671610] env[61649]: DEBUG nova.compute.provider_tree [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 912.672106] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Expecting reply to msg a437df84ba5d451f8d1c458a6d46dcbf in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 912.679532] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a437df84ba5d451f8d1c458a6d46dcbf [ 912.680469] env[61649]: DEBUG nova.scheduler.client.report [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 912.682704] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Expecting reply to msg 8be87fbd4e7e4619bd41ad3e53987ab4 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 912.696233] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8be87fbd4e7e4619bd41ad3e53987ab4 [ 912.696993] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.436s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 912.697487] env[61649]: DEBUG nova.compute.manager [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Start building networks asynchronously for instance. 
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 912.699112] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Expecting reply to msg a31e22f93b9041e9ace5799e04b10f4b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 912.726088] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a31e22f93b9041e9ace5799e04b10f4b [ 912.727667] env[61649]: DEBUG nova.compute.utils [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Using /dev/sd instead of None {{(pid=61649) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 912.728276] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Expecting reply to msg 9000df85ced14a6c8f7ab7f11dfee249 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 912.729279] env[61649]: DEBUG nova.compute.manager [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Allocating IP information in the background. {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 912.729459] env[61649]: DEBUG nova.network.neutron [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] allocate_for_instance() {{(pid=61649) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 912.741328] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9000df85ced14a6c8f7ab7f11dfee249 [ 912.741862] env[61649]: DEBUG nova.compute.manager [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Start building block device mappings for instance. 
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 912.743425] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Expecting reply to msg b8a3779fbe834c6cb871ef8b8dd56b94 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 912.769953] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b8a3779fbe834c6cb871ef8b8dd56b94 [ 912.772508] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Expecting reply to msg e5cd87a0bb8745b5a56feafeb03cc648 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 912.787258] env[61649]: DEBUG nova.policy [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ab858a27c79c4681be1f9453cc143bdd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '30ca32c18fcb46e08101de297b1771a5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61649) authorize /opt/stack/nova/nova/policy.py:203}} [ 912.802075] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e5cd87a0bb8745b5a56feafeb03cc648 [ 912.803083] env[61649]: DEBUG nova.compute.manager [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Start spawning the instance on the hypervisor. 
{{(pid=61649) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 912.823839] env[61649]: DEBUG nova.virt.hardware [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-04-02T10:03:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-04-02T10:03:42Z,direct_url=,disk_format='vmdk',id=d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d9c1bd4c77004c3cb8e42232cad1896c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-04-02T10:03:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 912.824094] env[61649]: DEBUG nova.virt.hardware [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Flavor limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 912.824257] env[61649]: DEBUG nova.virt.hardware [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Image limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 912.824440] env[61649]: DEBUG nova.virt.hardware [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Flavor pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 912.824626] env[61649]: DEBUG nova.virt.hardware [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Image pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 912.824798] env[61649]: DEBUG nova.virt.hardware [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 912.825004] env[61649]: DEBUG nova.virt.hardware [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 912.825164] env[61649]: DEBUG nova.virt.hardware [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61649) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 912.825331] env[61649]: DEBUG nova.virt.hardware [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Got 1 possible topologies {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 912.825510] env[61649]: DEBUG nova.virt.hardware [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 912.825682] env[61649]: DEBUG nova.virt.hardware [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 912.826510] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9794c8d6-c392-4b8d-a069-a9ac4c423131 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.833979] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b999acc0-6ea8-42ef-a07c-04010e5e84b2 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.167011] env[61649]: DEBUG nova.network.neutron [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Successfully created port: c4e8ab72-d146-4b4e-8976-397278cbadb2 {{(pid=61649) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 913.858698] env[61649]: DEBUG nova.network.neutron [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Successfully updated port: c4e8ab72-d146-4b4e-8976-397278cbadb2 {{(pid=61649) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 913.859253] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Expecting reply to msg 3e74ebce810e415ca5a187ac52613c96 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 913.866540] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3e74ebce810e415ca5a187ac52613c96 [ 913.867212] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Acquiring lock "refresh_cache-bf2399eb-b2df-43b3-bddd-48692825c40a" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 913.867350] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Acquired lock 
"refresh_cache-bf2399eb-b2df-43b3-bddd-48692825c40a" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 913.867496] env[61649]: DEBUG nova.network.neutron [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Building network info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 913.867989] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Expecting reply to msg 6cdd33a00a1e4515bdf68668a7590645 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 913.876671] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6cdd33a00a1e4515bdf68668a7590645 [ 913.907220] env[61649]: DEBUG nova.network.neutron [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Instance cache missing network info. {{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 914.141904] env[61649]: DEBUG nova.network.neutron [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Updating instance_info_cache with network_info: [{"id": "c4e8ab72-d146-4b4e-8976-397278cbadb2", "address": "fa:16:3e:38:e6:b0", "network": {"id": "678fbb0a-0a80-49c6-b1e9-d15eb9090137", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1249474007-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30ca32c18fcb46e08101de297b1771a5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8480e3f5-68bd-4c27-ae1f-7c994a8202b1", "external-id": "nsx-vlan-transportzone-628", "segmentation_id": 628, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc4e8ab72-d1", "ovs_interfaceid": "c4e8ab72-d146-4b4e-8976-397278cbadb2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 914.142634] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Expecting reply to msg 931e5b10c98543ad85a69501f93e9446 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 914.153935] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 931e5b10c98543ad85a69501f93e9446 [ 914.154536] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 
tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Releasing lock "refresh_cache-bf2399eb-b2df-43b3-bddd-48692825c40a" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 914.154816] env[61649]: DEBUG nova.compute.manager [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Instance network_info: |[{"id": "c4e8ab72-d146-4b4e-8976-397278cbadb2", "address": "fa:16:3e:38:e6:b0", "network": {"id": "678fbb0a-0a80-49c6-b1e9-d15eb9090137", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1249474007-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30ca32c18fcb46e08101de297b1771a5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8480e3f5-68bd-4c27-ae1f-7c994a8202b1", "external-id": "nsx-vlan-transportzone-628", "segmentation_id": 628, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc4e8ab72-d1", "ovs_interfaceid": "c4e8ab72-d146-4b4e-8976-397278cbadb2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 914.155198] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:38:e6:b0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8480e3f5-68bd-4c27-ae1f-7c994a8202b1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c4e8ab72-d146-4b4e-8976-397278cbadb2', 'vif_model': 'vmxnet3'}] {{(pid=61649) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 914.162752] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Creating folder: Project (30ca32c18fcb46e08101de297b1771a5). Parent ref: group-v51588. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 914.163283] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8e67a09c-92ba-42a0-9403-bbab16965322 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.175568] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Created folder: Project (30ca32c18fcb46e08101de297b1771a5) in parent group-v51588. 
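The folder creation here and the `CreateVM_Task` that follows are both managed-object calls made through the same oslo.vmware session: `CreateFolder` returns immediately, while `CreateVM_Task` returns a task reference that the session polls (the "progress is 0% ... 99% ... completed successfully" entries below). A rough sketch of the pair, assuming the VM config spec and resource-pool reference are built elsewhere; illustrative only, not the nova source:

```python
# Sketch of the folder + VM creation calls recorded around this point.
# Assumes an established oslo.vmware VMwareAPISession ('session') and its
# Vim client ('vim'); config-spec construction is elided.
from oslo_vmware import exceptions


def create_vm_in_folder(session, vim, parent_folder, folder_name,
                        vm_config_spec, res_pool):
    try:
        folder = session.invoke_api(vim, 'CreateFolder', parent_folder,
                                    name=folder_name)
    except exceptions.DuplicateName:
        # vCenter refuses duplicate names, so a concurrent build may have
        # created the folder first; nova then looks it up instead (elided).
        raise
    # Folder.CreateVM_Task registers and creates the VM asynchronously.
    task = session.invoke_api(vim, 'CreateVM_Task', folder,
                              config=vm_config_spec, pool=res_pool)
    task_info = session.wait_for_task(task)
    return task_info.result  # managed-object reference of the new VM
```

The two-level layout ("Project (<tenant id>)" containing an "Instances" folder) matches the Created folder entries above, and task-158163 is the `CreateVM_Task` being polled below.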
[ 914.175791] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Creating folder: Instances. Parent ref: group-v51639. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 914.175824] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-831738ed-cb48-469d-9ab9-fb45c8ce7929 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.184448] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Created folder: Instances in parent group-v51639. [ 914.184679] env[61649]: DEBUG oslo.service.loopingcall [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 914.184863] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Creating VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 914.185053] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-24d79f17-0850-4871-bc08-c3288f931294 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.212424] env[61649]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 914.212424] env[61649]: value = "task-158163" [ 914.212424] env[61649]: _type = "Task" [ 914.212424] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.223831] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158163, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.362420] env[61649]: DEBUG nova.compute.manager [req-3116a6f5-18b5-479c-9938-56fcee476c76 req-865b4618-cf57-453c-8f03-da65cff6e189 service nova] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Received event network-vif-plugged-c4e8ab72-d146-4b4e-8976-397278cbadb2 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 914.362644] env[61649]: DEBUG oslo_concurrency.lockutils [req-3116a6f5-18b5-479c-9938-56fcee476c76 req-865b4618-cf57-453c-8f03-da65cff6e189 service nova] Acquiring lock "bf2399eb-b2df-43b3-bddd-48692825c40a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 914.362852] env[61649]: DEBUG oslo_concurrency.lockutils [req-3116a6f5-18b5-479c-9938-56fcee476c76 req-865b4618-cf57-453c-8f03-da65cff6e189 service nova] Lock "bf2399eb-b2df-43b3-bddd-48692825c40a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 914.363031] env[61649]: DEBUG oslo_concurrency.lockutils [req-3116a6f5-18b5-479c-9938-56fcee476c76 req-865b4618-cf57-453c-8f03-da65cff6e189 service nova] Lock "bf2399eb-b2df-43b3-bddd-48692825c40a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 914.363199] env[61649]: DEBUG nova.compute.manager [req-3116a6f5-18b5-479c-9938-56fcee476c76 req-865b4618-cf57-453c-8f03-da65cff6e189 service nova] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] No waiting events found dispatching network-vif-plugged-c4e8ab72-d146-4b4e-8976-397278cbadb2 {{(pid=61649) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 914.363365] env[61649]: WARNING nova.compute.manager [req-3116a6f5-18b5-479c-9938-56fcee476c76 req-865b4618-cf57-453c-8f03-da65cff6e189 service nova] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Received unexpected event network-vif-plugged-c4e8ab72-d146-4b4e-8976-397278cbadb2 for instance with vm_state building and task_state spawning. [ 914.363526] env[61649]: DEBUG nova.compute.manager [req-3116a6f5-18b5-479c-9938-56fcee476c76 req-865b4618-cf57-453c-8f03-da65cff6e189 service nova] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Received event network-changed-c4e8ab72-d146-4b4e-8976-397278cbadb2 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 914.363777] env[61649]: DEBUG nova.compute.manager [req-3116a6f5-18b5-479c-9938-56fcee476c76 req-865b4618-cf57-453c-8f03-da65cff6e189 service nova] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Refreshing instance network info cache due to event network-changed-c4e8ab72-d146-4b4e-8976-397278cbadb2. 
{{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 914.363854] env[61649]: DEBUG oslo_concurrency.lockutils [req-3116a6f5-18b5-479c-9938-56fcee476c76 req-865b4618-cf57-453c-8f03-da65cff6e189 service nova] Acquiring lock "refresh_cache-bf2399eb-b2df-43b3-bddd-48692825c40a" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 914.363986] env[61649]: DEBUG oslo_concurrency.lockutils [req-3116a6f5-18b5-479c-9938-56fcee476c76 req-865b4618-cf57-453c-8f03-da65cff6e189 service nova] Acquired lock "refresh_cache-bf2399eb-b2df-43b3-bddd-48692825c40a" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 914.364193] env[61649]: DEBUG nova.network.neutron [req-3116a6f5-18b5-479c-9938-56fcee476c76 req-865b4618-cf57-453c-8f03-da65cff6e189 service nova] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Refreshing network info cache for port c4e8ab72-d146-4b4e-8976-397278cbadb2 {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 914.364676] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-3116a6f5-18b5-479c-9938-56fcee476c76 req-865b4618-cf57-453c-8f03-da65cff6e189 service nova] Expecting reply to msg ca32c4269a6449d2a511fb9bde9ad0fc in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 914.372065] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ca32c4269a6449d2a511fb9bde9ad0fc [ 914.717639] env[61649]: DEBUG nova.network.neutron [req-3116a6f5-18b5-479c-9938-56fcee476c76 req-865b4618-cf57-453c-8f03-da65cff6e189 service nova] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Updated VIF entry in instance network info cache for port c4e8ab72-d146-4b4e-8976-397278cbadb2. 
{{(pid=61649) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 914.718078] env[61649]: DEBUG nova.network.neutron [req-3116a6f5-18b5-479c-9938-56fcee476c76 req-865b4618-cf57-453c-8f03-da65cff6e189 service nova] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Updating instance_info_cache with network_info: [{"id": "c4e8ab72-d146-4b4e-8976-397278cbadb2", "address": "fa:16:3e:38:e6:b0", "network": {"id": "678fbb0a-0a80-49c6-b1e9-d15eb9090137", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1249474007-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30ca32c18fcb46e08101de297b1771a5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8480e3f5-68bd-4c27-ae1f-7c994a8202b1", "external-id": "nsx-vlan-transportzone-628", "segmentation_id": 628, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc4e8ab72-d1", "ovs_interfaceid": "c4e8ab72-d146-4b4e-8976-397278cbadb2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 914.718689] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-3116a6f5-18b5-479c-9938-56fcee476c76 req-865b4618-cf57-453c-8f03-da65cff6e189 service nova] Expecting reply to msg b9f7fcf53382497081861f8da533191c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 914.725593] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158163, 'name': CreateVM_Task} progress is 99%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.727131] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b9f7fcf53382497081861f8da533191c [ 914.727754] env[61649]: DEBUG oslo_concurrency.lockutils [req-3116a6f5-18b5-479c-9938-56fcee476c76 req-865b4618-cf57-453c-8f03-da65cff6e189 service nova] Releasing lock "refresh_cache-bf2399eb-b2df-43b3-bddd-48692825c40a" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 915.223242] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158163, 'name': CreateVM_Task} progress is 99%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.724234] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158163, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.854892] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7e8f793e-8739-4c94-9204-95f0e6d6129c tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg d14793bc525f4d499681b6db88150a31 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 915.864272] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d14793bc525f4d499681b6db88150a31 [ 915.864855] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7e8f793e-8739-4c94-9204-95f0e6d6129c tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Acquiring lock "29f84900-0805-4ab2-af4d-bd7be2ac94d3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 916.226223] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158163, 'name': CreateVM_Task, 'duration_secs': 1.919113} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.226536] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Created VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 916.227056] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 916.227240] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 916.227573] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 916.227807] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20de013c-ef16-4f14-aa56-159124058f5e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.232712] env[61649]: DEBUG oslo_vmware.api [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Waiting for the task: (returnval){ [ 916.232712] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]529fd15d-184b-a7a9-1264-d8027bb02f12" [ 916.232712] env[61649]: _type = "Task" [ 916.232712] env[61649]: } to complete. 
{{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.241321] env[61649]: DEBUG oslo_vmware.api [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]529fd15d-184b-a7a9-1264-d8027bb02f12, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.743599] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 916.743856] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Processing image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 916.744088] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 945.929889] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 946.929730] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 946.929909] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Starting heal instance info cache {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 946.930166] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Rebuilding the list of instances to heal {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 946.930758] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 7cb37c4bee864c18afe20d241ad1b56f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 946.949677] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7cb37c4bee864c18afe20d241ad1b56f [ 946.951947] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Skipping network cache update for instance because it is Building. 
{{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 946.952114] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 00931111-13a1-447d-a401-943221badd59] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 946.952252] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 946.952378] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 946.952499] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 946.952618] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 946.952737] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 946.952853] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 946.952968] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 946.953087] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 946.953204] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Didn't find any instances for network info cache update. 
{{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 946.953696] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 946.953877] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 946.954007] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61649) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 947.928649] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 947.928848] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 947.929022] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 947.929229] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 947.929611] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg f088176d5da548daa0dd6d19a7b0af15 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 947.939229] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f088176d5da548daa0dd6d19a7b0af15 [ 947.940422] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 947.940612] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 947.940784] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.940942] env[61649]: DEBUG 
nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61649) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 947.942048] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dc2a01c-37e7-4808-b01e-bcbe768ab2bd {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.953603] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3a7123a-f230-4935-b872-82da1cc1fed0 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.972806] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec855ea5-c1a3-452a-a55b-14647b983334 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.979063] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6794088-6cf5-454d-a8b9-2c883c1cbc19 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.007697] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181808MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61649) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 948.007857] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 948.008068] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 948.008953] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 9c989baabd144a2eb3ff542e022c8360 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 948.041103] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9c989baabd144a2eb3ff542e022c8360 [ 948.045197] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg b18d1ae2fd3543d4b9e452e0db6ba6eb in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 948.057377] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b18d1ae2fd3543d4b9e452e0db6ba6eb [ 948.075493] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance fd0ac9db-adc2-46f2-93ff-0b7e299534a7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 948.075648] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 00931111-13a1-447d-a401-943221badd59 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 948.075774] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 99f9912a-edf0-40f5-a7ce-55767081705b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 948.075894] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 7f9f2074-6822-4d9d-9791-4bebc7e55862 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 948.076026] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance bf8c692f-6510-4548-aedd-0e1792512e20 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 948.076153] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance e5fe92cf-e150-419f-a164-a98a9d24dd8c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 948.076270] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 95426048-d403-4dad-9ad7-b76de655a319 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 948.076380] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance a0db1e96-4ca4-4fed-b86b-d8457f3570a9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 948.076490] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 29f84900-0805-4ab2-af4d-bd7be2ac94d3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 948.076599] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance bf2399eb-b2df-43b3-bddd-48692825c40a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 948.077123] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 3410a6db493d4f2ba78f00ea9400f55a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 948.092665] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3410a6db493d4f2ba78f00ea9400f55a [ 948.093394] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 6ab197e9-3e38-4b37-b625-c30b6977261a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 948.093871] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg b9281a7753e04d52999a7b9a0752a0b6 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 948.103360] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b9281a7753e04d52999a7b9a0752a0b6 [ 948.104228] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance d7a1c8c3-1694-4704-8414-098af751c05e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 948.104514] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 2409be81329c45ef893a6ea24048d715 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 948.113299] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2409be81329c45ef893a6ea24048d715 [ 948.113922] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance e4d8cb96-182d-4b77-a8ac-dfd1bf52d484 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 948.114348] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 3de1599535324beba60aa98b791d5e8c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 948.128276] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3de1599535324beba60aa98b791d5e8c [ 948.128974] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 10da1abe-1c95-44b8-a10d-ce618625b69b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 948.130427] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 6b9106306e994638acd2a18adf0cca79 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 948.139630] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6b9106306e994638acd2a18adf0cca79 [ 948.140396] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance a086f03c-c993-4e1a-8a3e-efa40bb8b8bd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 948.140925] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 17e2d540d34a4979940a25c162b8f46e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 948.151155] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 17e2d540d34a4979940a25c162b8f46e [ 948.151824] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance c9fe1bfe-e813-43e9-9668-b813416ee27b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 948.152309] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 2fecb71d94dc49bc97136eb41047fcbd in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 948.162024] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2fecb71d94dc49bc97136eb41047fcbd [ 948.163120] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 7e77db18-077d-4665-ad90-c4e5f470716c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 948.163120] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 8c173d1280584157ae8e86f19eb9c6cd in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 948.173260] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8c173d1280584157ae8e86f19eb9c6cd [ 948.173857] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 6efdc98b-e32d-4313-b13f-95c3d4911823 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 948.174307] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 0583026ac6924e7cb8d830bdf5e11b17 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 948.184599] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0583026ac6924e7cb8d830bdf5e11b17 [ 948.185534] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance f51cfd74-25e5-4077-9b43-8cb38fe051f8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 948.186015] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg b51d6aea05aa4e6a8be1667910513e1d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 948.196359] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b51d6aea05aa4e6a8be1667910513e1d [ 948.196850] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance db7db1c9-6716-4591-b669-b85dd595a3e9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 948.197431] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 9fbd66706525422dbe212cabbae2a2e7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 948.206571] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9fbd66706525422dbe212cabbae2a2e7 [ 948.207241] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 899db5f5-4963-4f7a-97d2-9c2dfd7a6981 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 948.207774] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg a085932ee1e74a7a99d99abd011a87ae in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 948.221120] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a085932ee1e74a7a99d99abd011a87ae [ 948.221763] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 7de3e5cd-94ee-4a80-8baf-17ccfed9d8c8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 948.222200] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg ea5ea40a7158435296ed0e2fef2972a2 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 948.231100] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ea5ea40a7158435296ed0e2fef2972a2 [ 948.231714] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 9cdd96c2-2837-4cb3-855c-ecad727dd5d4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 948.232176] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg e5f7bd04e8ae42a8b0b2f099c1bfbe07 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 948.241228] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e5f7bd04e8ae42a8b0b2f099c1bfbe07 [ 948.241859] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 3e1127c5-876f-47b9-b652-7a558711a1a7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 948.242083] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 948.242230] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 948.501206] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-923b14b5-2810-4cd5-9b10-8920b4601b9d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.509074] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59a5841a-1e5b-473d-944c-b7c8eea2c991 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.537326] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd9ce3b9-3c47-42b6-b366-860685a40efb {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.543690] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43c383c3-0f0e-4aa5-b5c1-4f51215424ab {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.556784] env[61649]: DEBUG nova.compute.provider_tree [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 948.557250] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 307f502dfa97493485a488f7e612feaa in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 948.566484] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 307f502dfa97493485a488f7e612feaa [ 948.567394] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 948.569701] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg f381a51f55b64ed3aee7ab8c64c513f5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 948.582818] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f381a51f55b64ed3aee7ab8c64c513f5 [ 
948.583462] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61649) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 948.583643] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.576s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 949.584551] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 960.750059] env[61649]: WARNING oslo_vmware.rw_handles [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 960.750059] env[61649]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 960.750059] env[61649]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 960.750059] env[61649]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 960.750059] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 960.750059] env[61649]: ERROR oslo_vmware.rw_handles response.begin() [ 960.750059] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 960.750059] env[61649]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 960.750059] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 960.750059] env[61649]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 960.750059] env[61649]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 960.750059] env[61649]: ERROR oslo_vmware.rw_handles [ 960.750605] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Downloaded image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to vmware_temp/828af11d-f050-47d3-aa1b-c52041f96ce8/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 960.752469] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Caching image {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 960.752725] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 
tempest-ServersTestFqdnHostnames-2010770538-project-member] Copying Virtual Disk [datastore1] vmware_temp/828af11d-f050-47d3-aa1b-c52041f96ce8/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk to [datastore1] vmware_temp/828af11d-f050-47d3-aa1b-c52041f96ce8/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk {{(pid=61649) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 960.752996] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e44b5683-c38f-4548-a36f-220dd52150db {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.760905] env[61649]: DEBUG oslo_vmware.api [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Waiting for the task: (returnval){ [ 960.760905] env[61649]: value = "task-158164" [ 960.760905] env[61649]: _type = "Task" [ 960.760905] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.768578] env[61649]: DEBUG oslo_vmware.api [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Task: {'id': task-158164, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.271032] env[61649]: DEBUG oslo_vmware.exceptions [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Fault InvalidArgument not matched. {{(pid=61649) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 961.271358] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 961.271959] env[61649]: ERROR nova.compute.manager [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 961.271959] env[61649]: Faults: ['InvalidArgument'] [ 961.271959] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Traceback (most recent call last): [ 961.271959] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 961.271959] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] yield resources [ 961.271959] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 961.271959] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] self.driver.spawn(context, instance, image_meta, [ 961.271959] 
env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 961.271959] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 961.271959] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 961.271959] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] self._fetch_image_if_missing(context, vi) [ 961.271959] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 961.272412] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] image_cache(vi, tmp_image_ds_loc) [ 961.272412] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 961.272412] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] vm_util.copy_virtual_disk( [ 961.272412] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 961.272412] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] session._wait_for_task(vmdk_copy_task) [ 961.272412] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 961.272412] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] return self.wait_for_task(task_ref) [ 961.272412] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 961.272412] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] return evt.wait() [ 961.272412] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 961.272412] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] result = hub.switch() [ 961.272412] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 961.272412] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] return self.greenlet.switch() [ 961.272746] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 961.272746] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] self.f(*self.args, **self.kw) [ 961.272746] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 961.272746] env[61649]: ERROR nova.compute.manager [instance: 
fd0ac9db-adc2-46f2-93ff-0b7e299534a7] raise exceptions.translate_fault(task_info.error) [ 961.272746] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 961.272746] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Faults: ['InvalidArgument'] [ 961.272746] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] [ 961.272746] env[61649]: INFO nova.compute.manager [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Terminating instance [ 961.274387] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 961.274641] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 961.274912] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0689b668-7212-47e4-8797-dc35fff6312e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.277087] env[61649]: DEBUG nova.compute.manager [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Start destroying the instance on the hypervisor. 
{{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 961.277346] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 961.278097] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5822d83f-a176-4220-bec1-522784184ec0 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.284578] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Unregistering the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 961.284820] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-58c031cb-aec6-4b06-a927-4325e41fa5ab {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.286874] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 961.287108] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61649) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 961.288035] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5be83a69-54e7-4c27-b2e7-db88f47a5c5d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.292472] env[61649]: DEBUG oslo_vmware.api [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Waiting for the task: (returnval){ [ 961.292472] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52188ae0-fa66-69be-b548-feda6fd5a681" [ 961.292472] env[61649]: _type = "Task" [ 961.292472] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.299680] env[61649]: DEBUG oslo_vmware.api [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52188ae0-fa66-69be-b548-feda6fd5a681, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.356154] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Unregistered the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 961.356464] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Deleting contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 961.356693] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Deleting the datastore file [datastore1] fd0ac9db-adc2-46f2-93ff-0b7e299534a7 {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 961.356987] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-901d9138-d127-4f78-9db5-c36be0646dac {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.363027] env[61649]: DEBUG oslo_vmware.api [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Waiting for the task: (returnval){ [ 961.363027] env[61649]: value = "task-158166" [ 961.363027] env[61649]: _type = "Task" [ 961.363027] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.370089] env[61649]: DEBUG oslo_vmware.api [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Task: {'id': task-158166, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.802561] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] [instance: 00931111-13a1-447d-a401-943221badd59] Preparing fetch location {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 961.802828] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Creating directory with path [datastore1] vmware_temp/bccbc138-28fa-4cb9-ab5d-ea09bdee4584/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 961.803033] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c53db6a2-b691-41d7-b93d-696822f77783 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.814769] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Created directory with path [datastore1] vmware_temp/bccbc138-28fa-4cb9-ab5d-ea09bdee4584/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 961.814969] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] [instance: 00931111-13a1-447d-a401-943221badd59] Fetch image to [datastore1] vmware_temp/bccbc138-28fa-4cb9-ab5d-ea09bdee4584/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 961.815135] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] [instance: 00931111-13a1-447d-a401-943221badd59] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to [datastore1] vmware_temp/bccbc138-28fa-4cb9-ab5d-ea09bdee4584/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 961.815868] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddf24b9b-cb66-4bcf-90db-4f459be76b46 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.823153] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71469b63-759f-4662-9943-30bd18b5eac6 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.832129] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bbcac3d-5a68-43ae-b620-5d681d2e287a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.863405] env[61649]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af2660ba-7a5d-4514-a98e-aea1c31f474c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.874269] env[61649]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-272fa8e0-8082-4639-8efe-1cab9df6cec2 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.875945] env[61649]: DEBUG oslo_vmware.api [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Task: {'id': task-158166, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.080341} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.876211] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Deleted the datastore file {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 961.876396] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Deleted contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 961.876570] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 961.876742] env[61649]: INFO nova.compute.manager [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Took 0.60 seconds to destroy the instance on the hypervisor. 
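The wait_for_task / _poll_task pairs that recur throughout this log (a task is submitted, "progress is 0%" lines follow, and the task either completes successfully or ends in a translated fault) are a plain poll-until-terminal-state loop. The sketch below is a simplified illustration of that pattern, not oslo.vmware's actual implementation; fetch_task_info() is a hypothetical callback standing in for the real vCenter property queries, and the state/error dictionary shape is assumed for the example:

    import time

    class TaskFailed(Exception):
        """Stand-in for a translated remote fault (e.g. VimFaultException)."""

    def wait_for_task(fetch_task_info, task_id, interval=0.5, timeout=300.0):
        # fetch_task_info(task_id) is assumed to return a dict such as
        # {'state': 'running' | 'success' | 'error', 'error': '...'}.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = fetch_task_info(task_id)
            if info['state'] == 'success':
                return info  # corresponds to the "completed successfully" entries
            if info['state'] == 'error':
                # The remote task's fault is raised locally as an exception.
                raise TaskFailed(info.get('error', 'unknown fault'))
            time.sleep(interval)  # the repeated "progress is 0%" polls
        raise TimeoutError(f'task {task_id} did not complete within {timeout}s')

On the error branch of this loop is where a vCenter fault such as "A specified parameter was not correct: fileType" surfaces as the VimFaultException seen in the surrounding tracebacks.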
[ 961.878921] env[61649]: DEBUG nova.compute.claims [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Aborting claim: {{(pid=61649) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 961.879102] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 961.879351] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 961.881396] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Expecting reply to msg 059f91aa9b864ad393d61d22316285aa in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 961.896478] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] [instance: 00931111-13a1-447d-a401-943221badd59] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 961.927433] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 059f91aa9b864ad393d61d22316285aa [ 961.949541] env[61649]: DEBUG oslo_vmware.rw_handles [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/bccbc138-28fa-4cb9-ab5d-ea09bdee4584/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 962.020629] env[61649]: DEBUG oslo_vmware.rw_handles [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Completed reading data from the image iterator. {{(pid=61649) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 962.020823] env[61649]: DEBUG oslo_vmware.rw_handles [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/bccbc138-28fa-4cb9-ab5d-ea09bdee4584/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61649) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 962.260603] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-866251e3-4797-44df-9f20-4e0e94fd3056 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.268356] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf04c4fe-e598-4d97-abc1-67d421db5c25 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.299561] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-137f90a9-5806-43b9-8694-263ae0dca1c3 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.307100] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf8bdc8b-6412-4b38-9776-74b83b4ad2dc {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.321518] env[61649]: DEBUG nova.compute.provider_tree [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 962.322013] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Expecting reply to msg 3e1c1be4ada148949685500d98f83662 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 962.329594] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3e1c1be4ada148949685500d98f83662 [ 962.330473] env[61649]: DEBUG nova.scheduler.client.report [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 962.332714] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Expecting reply to msg b80b92d9a915446580db8425e058674c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 962.343415] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b80b92d9a915446580db8425e058674c [ 962.344129] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.465s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 962.344672] env[61649]: ERROR nova.compute.manager [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 962.344672] env[61649]: Faults: ['InvalidArgument'] [ 962.344672] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Traceback (most recent call last): [ 962.344672] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 962.344672] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] self.driver.spawn(context, instance, image_meta, [ 962.344672] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 962.344672] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 962.344672] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 962.344672] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] self._fetch_image_if_missing(context, vi) [ 962.344672] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 962.344672] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] image_cache(vi, tmp_image_ds_loc) [ 962.344672] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 962.344971] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] vm_util.copy_virtual_disk( [ 962.344971] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 962.344971] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] session._wait_for_task(vmdk_copy_task) [ 962.344971] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 962.344971] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] return self.wait_for_task(task_ref) [ 962.344971] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 962.344971] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] return evt.wait() [ 962.344971] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 962.344971] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] result = hub.switch() [ 962.344971] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 962.344971] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] return self.greenlet.switch() [ 962.344971] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 962.344971] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] self.f(*self.args, **self.kw) [ 962.345271] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 962.345271] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] raise exceptions.translate_fault(task_info.error) [ 962.345271] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 962.345271] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Faults: ['InvalidArgument'] [ 962.345271] env[61649]: ERROR nova.compute.manager [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] [ 962.345398] env[61649]: DEBUG nova.compute.utils [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] VimFaultException {{(pid=61649) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 962.346683] env[61649]: DEBUG nova.compute.manager [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Build of instance fd0ac9db-adc2-46f2-93ff-0b7e299534a7 was re-scheduled: A specified parameter was not correct: fileType [ 962.346683] env[61649]: Faults: ['InvalidArgument'] {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 962.347059] env[61649]: DEBUG nova.compute.manager [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Unplugging VIFs for instance {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 962.347238] env[61649]: DEBUG nova.compute.manager [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 962.347436] env[61649]: DEBUG nova.compute.manager [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 962.347646] env[61649]: DEBUG nova.network.neutron [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 962.573285] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Expecting reply to msg 1753df936fc647f389021390852fa818 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 962.581572] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1753df936fc647f389021390852fa818 [ 962.582136] env[61649]: DEBUG nova.network.neutron [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 962.582609] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Expecting reply to msg 6332bfc5b7d64c78be8e876651ca2a36 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 962.591725] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6332bfc5b7d64c78be8e876651ca2a36 [ 962.592211] env[61649]: INFO nova.compute.manager [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Took 0.24 seconds to deallocate network for instance. 
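For context on the traceback above: when a task such as CopyVirtualDisk_Task ends in error, oslo.vmware raises VimFaultException, whose fault_list carries the raw fault names (here ['InvalidArgument']) and whose message is the vCenter text "A specified parameter was not correct: fileType". A hedged sketch of that handling, with a hypothetical reschedule hook standing in for Nova's _build_and_run_instance retry path:

    # Illustrative only: how a caller sees the failure logged above.
    from oslo_vmware import exceptions as vexc

    def copy_disk_or_reschedule(session, vmdk_copy_task, reschedule):
        try:
            session.wait_for_task(vmdk_copy_task)
        except vexc.VimFaultException as e:
            # e.fault_list holds fault names, e.g. ['InvalidArgument'];
            # str(e) holds the vCenter message quoted in the log.
            if 'InvalidArgument' in e.fault_list:
                reschedule(e)   # hypothetical hook: re-schedule the build
            else:
                raise

This matches the flow above: the claim is aborted, the build is re-scheduled, and the network is deallocated rather than the whole delete failing.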
[ 962.594732] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Expecting reply to msg 728ea69d7257408ab44ece28fadb9c9c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 962.626257] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 728ea69d7257408ab44ece28fadb9c9c [ 962.629041] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Expecting reply to msg 9884bcdb2ea14dbc87d997992a596772 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 962.661662] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9884bcdb2ea14dbc87d997992a596772 [ 962.680539] env[61649]: INFO nova.scheduler.client.report [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Deleted allocations for instance fd0ac9db-adc2-46f2-93ff-0b7e299534a7 [ 962.686594] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Expecting reply to msg d1161e35e998421c88cd6f23ce9c5b8f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 962.698941] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d1161e35e998421c88cd6f23ce9c5b8f [ 962.699544] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4345777d-4b5b-4f1b-a5a7-dd4cdbb4b23f tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Lock "fd0ac9db-adc2-46f2-93ff-0b7e299534a7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 384.275s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 962.700080] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Expecting reply to msg a0bb937f5f974e7d9b4255528709ea5b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 962.700875] env[61649]: DEBUG oslo_concurrency.lockutils [None req-32434135-d37d-4fd9-8393-5505a3cb03b9 tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Lock "fd0ac9db-adc2-46f2-93ff-0b7e299534a7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 185.040s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 962.701098] env[61649]: DEBUG oslo_concurrency.lockutils [None req-32434135-d37d-4fd9-8393-5505a3cb03b9 tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Acquiring lock "fd0ac9db-adc2-46f2-93ff-0b7e299534a7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.701332] env[61649]: DEBUG oslo_concurrency.lockutils [None req-32434135-d37d-4fd9-8393-5505a3cb03b9 tempest-ServersTestFqdnHostnames-2010770538 
tempest-ServersTestFqdnHostnames-2010770538-project-member] Lock "fd0ac9db-adc2-46f2-93ff-0b7e299534a7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 962.701498] env[61649]: DEBUG oslo_concurrency.lockutils [None req-32434135-d37d-4fd9-8393-5505a3cb03b9 tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Lock "fd0ac9db-adc2-46f2-93ff-0b7e299534a7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 962.703240] env[61649]: INFO nova.compute.manager [None req-32434135-d37d-4fd9-8393-5505a3cb03b9 tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Terminating instance [ 962.704863] env[61649]: DEBUG nova.compute.manager [None req-32434135-d37d-4fd9-8393-5505a3cb03b9 tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 962.705049] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-32434135-d37d-4fd9-8393-5505a3cb03b9 tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 962.705693] env[61649]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-670c7709-6415-4105-afbe-5c09fd6beb27 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.715192] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b9df485-c361-4a2f-ac89-c02ace7c6738 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.725435] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a0bb937f5f974e7d9b4255528709ea5b [ 962.725939] env[61649]: DEBUG nova.compute.manager [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 962.727609] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Expecting reply to msg 9432707959b64b458c2960bfe3ebaa11 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 962.745118] env[61649]: WARNING nova.virt.vmwareapi.vmops [None req-32434135-d37d-4fd9-8393-5505a3cb03b9 tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance fd0ac9db-adc2-46f2-93ff-0b7e299534a7 could not be found. 
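The Terminating / "Instance does not exist on backend" sequence here reflects a deliberately tolerant destroy: the driver first resolves the VM by UUID (the SearchIndex.FindAllByUuid call above) and treats an empty result as already gone. A rough sketch under assumed names (destroy_if_present is illustrative, not Nova's actual helper):

    # Illustrative helper (not Nova's real code path): look the VM up the
    # way the SearchIndex.FindAllByUuid call above does, tolerate absence.
    def destroy_if_present(session, instance_uuid):
        search_index = session.vim.service_content.searchIndex
        vm_refs = session.invoke_api(
            session.vim, 'FindAllByUuid', search_index,
            uuid=instance_uuid, vmSearch=True, instanceUuid=True)
        if not vm_refs:
            # Matches the WARNING above: nothing on the backend, so the
            # delete proceeds as if the instance were already destroyed.
            return
        task = session.invoke_api(session.vim, 'Destroy_Task', vm_refs[0])
        session.wait_for_task(task)

That is why the very next entries log "Instance destroyed" and a 0.04-second hypervisor destroy even though the VM was never found.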
[ 962.745326] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-32434135-d37d-4fd9-8393-5505a3cb03b9 tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 962.745502] env[61649]: INFO nova.compute.manager [None req-32434135-d37d-4fd9-8393-5505a3cb03b9 tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Took 0.04 seconds to destroy the instance on the hypervisor. [ 962.745745] env[61649]: DEBUG oslo.service.loopingcall [None req-32434135-d37d-4fd9-8393-5505a3cb03b9 tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 962.746123] env[61649]: DEBUG nova.compute.manager [-] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 962.746237] env[61649]: DEBUG nova.network.neutron [-] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 962.757209] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9432707959b64b458c2960bfe3ebaa11 [ 962.764034] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg f73b9ee2bcc24b059098b1deefd55935 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 962.770945] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f73b9ee2bcc24b059098b1deefd55935 [ 962.771158] env[61649]: DEBUG nova.network.neutron [-] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 962.771543] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 6d84f20a13cd43f383d14e5900c12fa0 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 962.772922] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.773146] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 962.776264] env[61649]: INFO nova.compute.claims [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 
962.776264] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Expecting reply to msg fc931385be6b43fabf5161c3360b8379 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 962.778518] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6d84f20a13cd43f383d14e5900c12fa0 [ 962.778890] env[61649]: INFO nova.compute.manager [-] [instance: fd0ac9db-adc2-46f2-93ff-0b7e299534a7] Took 0.03 seconds to deallocate network for instance. [ 962.782175] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-32434135-d37d-4fd9-8393-5505a3cb03b9 tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Expecting reply to msg aa85465d529c41ebaed588d0af9eed20 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 962.807481] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fc931385be6b43fabf5161c3360b8379 [ 962.809369] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Expecting reply to msg 81d3e65723c341858128451603c8bc54 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 962.812864] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aa85465d529c41ebaed588d0af9eed20 [ 962.816292] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 81d3e65723c341858128451603c8bc54 [ 962.826184] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-32434135-d37d-4fd9-8393-5505a3cb03b9 tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Expecting reply to msg edef7599c0dc4a7db46d7c7967c8d9fc in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 962.864218] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg edef7599c0dc4a7db46d7c7967c8d9fc [ 962.868250] env[61649]: DEBUG oslo_concurrency.lockutils [None req-32434135-d37d-4fd9-8393-5505a3cb03b9 tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Lock "fd0ac9db-adc2-46f2-93ff-0b7e299534a7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.167s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 962.868592] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-32434135-d37d-4fd9-8393-5505a3cb03b9 tempest-ServersTestFqdnHostnames-2010770538 tempest-ServersTestFqdnHostnames-2010770538-project-member] Expecting reply to msg 3b533f5201af4961a0b55ad888c2b2e0 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 962.879466] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3b533f5201af4961a0b55ad888c2b2e0 [ 963.108474] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08ae61da-ac5a-471b-8964-0b301a2d37b3 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.116377] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a80149a3-acab-4061-905a-7f6788dd388f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
963.145088] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46c8cb75-d4c0-4ed3-8080-af19aacbb874 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.152395] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6d2054e-0283-4c20-a5b2-7451812f627c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.166204] env[61649]: DEBUG nova.compute.provider_tree [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 963.166708] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Expecting reply to msg b910ae188d6e4e24849635c732c6a7c7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 963.174587] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b910ae188d6e4e24849635c732c6a7c7 [ 963.175484] env[61649]: DEBUG nova.scheduler.client.report [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 963.177799] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Expecting reply to msg c2b8e01d6bf34232a46dec7772b791d5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 963.189275] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c2b8e01d6bf34232a46dec7772b791d5 [ 963.190019] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.417s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 963.190510] env[61649]: DEBUG nova.compute.manager [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Start building networks asynchronously for instance. 
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 963.192119] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Expecting reply to msg b25a0838303b4e6184625143d4cbb746 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 963.223901] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b25a0838303b4e6184625143d4cbb746 [ 963.225725] env[61649]: DEBUG nova.compute.utils [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Using /dev/sd instead of None {{(pid=61649) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 963.226335] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Expecting reply to msg 9c8674c0bfe443d790694b956d1c5f47 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 963.227368] env[61649]: DEBUG nova.compute.manager [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Allocating IP information in the background. {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 963.227542] env[61649]: DEBUG nova.network.neutron [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] allocate_for_instance() {{(pid=61649) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 963.238063] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9c8674c0bfe443d790694b956d1c5f47 [ 963.238589] env[61649]: DEBUG nova.compute.manager [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Start building block device mappings for instance. {{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 963.240233] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Expecting reply to msg 0593c5c2850a423386dde6b8c490dc1d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 963.269837] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0593c5c2850a423386dde6b8c490dc1d [ 963.272384] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Expecting reply to msg 3a1e053f18ca40798584a7c17321cca4 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 963.299511] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3a1e053f18ca40798584a7c17321cca4 [ 963.300640] env[61649]: DEBUG nova.compute.manager [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Start spawning the instance on the hypervisor. 
{{(pid=61649) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 963.317527] env[61649]: DEBUG nova.policy [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b6dc125412494047b243a232dd14e41f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '611107ae6fd34f229748efbb07ee5440', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61649) authorize /opt/stack/nova/nova/policy.py:203}} [ 963.324909] env[61649]: DEBUG nova.virt.hardware [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-04-02T10:09:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='a8489c58-3905-40b2-8d22-c4be0791c020',id=35,is_public=True,memory_mb=128,name='tempest-test_resize_flavor_-1196159657',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-04-02T10:03:42Z,direct_url=,disk_format='vmdk',id=d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d9c1bd4c77004c3cb8e42232cad1896c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-04-02T10:03:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 963.325130] env[61649]: DEBUG nova.virt.hardware [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Flavor limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 963.325286] env[61649]: DEBUG nova.virt.hardware [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Image limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 963.325468] env[61649]: DEBUG nova.virt.hardware [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Flavor pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 963.325613] env[61649]: DEBUG nova.virt.hardware [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Image pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 963.325758] env[61649]: DEBUG nova.virt.hardware [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 963.325990] env[61649]: 
DEBUG nova.virt.hardware [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 963.326126] env[61649]: DEBUG nova.virt.hardware [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 963.326291] env[61649]: DEBUG nova.virt.hardware [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Got 1 possible topologies {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 963.326453] env[61649]: DEBUG nova.virt.hardware [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 963.326624] env[61649]: DEBUG nova.virt.hardware [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 963.327764] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59819f7e-a4cf-447e-8ffe-f510bd540785 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.334996] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f830e88-80ed-4c13-8187-44cee68be07a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.641507] env[61649]: DEBUG nova.network.neutron [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Successfully created port: 3776ca95-bf9f-41e2-8b74-e2fdcbeb4e4f {{(pid=61649) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 964.760550] env[61649]: DEBUG nova.compute.manager [req-c8820eb0-e7c9-4113-9f08-a9ed1bf8f0e8 req-d191cf12-db54-45dd-a4bd-fbe8d97a4846 service nova] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Received event network-vif-plugged-3776ca95-bf9f-41e2-8b74-e2fdcbeb4e4f {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 964.760939] env[61649]: DEBUG oslo_concurrency.lockutils [req-c8820eb0-e7c9-4113-9f08-a9ed1bf8f0e8 req-d191cf12-db54-45dd-a4bd-fbe8d97a4846 service nova] Acquiring lock "6ab197e9-3e38-4b37-b625-c30b6977261a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 964.761000] env[61649]: DEBUG oslo_concurrency.lockutils [req-c8820eb0-e7c9-4113-9f08-a9ed1bf8f0e8 req-d191cf12-db54-45dd-a4bd-fbe8d97a4846 service nova] Lock 
"6ab197e9-3e38-4b37-b625-c30b6977261a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 964.761169] env[61649]: DEBUG oslo_concurrency.lockutils [req-c8820eb0-e7c9-4113-9f08-a9ed1bf8f0e8 req-d191cf12-db54-45dd-a4bd-fbe8d97a4846 service nova] Lock "6ab197e9-3e38-4b37-b625-c30b6977261a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 964.761338] env[61649]: DEBUG nova.compute.manager [req-c8820eb0-e7c9-4113-9f08-a9ed1bf8f0e8 req-d191cf12-db54-45dd-a4bd-fbe8d97a4846 service nova] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] No waiting events found dispatching network-vif-plugged-3776ca95-bf9f-41e2-8b74-e2fdcbeb4e4f {{(pid=61649) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 964.761502] env[61649]: WARNING nova.compute.manager [req-c8820eb0-e7c9-4113-9f08-a9ed1bf8f0e8 req-d191cf12-db54-45dd-a4bd-fbe8d97a4846 service nova] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Received unexpected event network-vif-plugged-3776ca95-bf9f-41e2-8b74-e2fdcbeb4e4f for instance with vm_state building and task_state spawning. [ 964.767544] env[61649]: DEBUG nova.network.neutron [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Successfully updated port: 3776ca95-bf9f-41e2-8b74-e2fdcbeb4e4f {{(pid=61649) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 964.767975] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Expecting reply to msg a0bf07331e2a4525b26a15e2a9a6acfd in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 964.778591] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a0bf07331e2a4525b26a15e2a9a6acfd [ 964.779331] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Acquiring lock "refresh_cache-6ab197e9-3e38-4b37-b625-c30b6977261a" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 964.779476] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Acquired lock "refresh_cache-6ab197e9-3e38-4b37-b625-c30b6977261a" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 964.779623] env[61649]: DEBUG nova.network.neutron [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Building network info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 964.780030] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Expecting reply to msg 
2b6b9efeb64d45748d1ef04e496330ab in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 964.787036] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2b6b9efeb64d45748d1ef04e496330ab [ 964.880023] env[61649]: DEBUG nova.network.neutron [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Instance cache missing network info. {{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 965.197070] env[61649]: DEBUG nova.network.neutron [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Updating instance_info_cache with network_info: [{"id": "3776ca95-bf9f-41e2-8b74-e2fdcbeb4e4f", "address": "fa:16:3e:3a:2d:27", "network": {"id": "b96f19dc-5248-4d43-8f39-ea3131aaf6db", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.198", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d9c1bd4c77004c3cb8e42232cad1896c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a8c8175-1197-4f12-baac-ef6aba95f585", "external-id": "nsx-vlan-transportzone-832", "segmentation_id": 832, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3776ca95-bf", "ovs_interfaceid": "3776ca95-bf9f-41e2-8b74-e2fdcbeb4e4f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 965.197634] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Expecting reply to msg d2340733baed428b986e23d460b01a42 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 965.210121] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d2340733baed428b986e23d460b01a42 [ 965.210755] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Releasing lock "refresh_cache-6ab197e9-3e38-4b37-b625-c30b6977261a" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 965.211035] env[61649]: DEBUG nova.compute.manager [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Instance network_info: |[{"id": "3776ca95-bf9f-41e2-8b74-e2fdcbeb4e4f", "address": "fa:16:3e:3a:2d:27", "network": {"id": "b96f19dc-5248-4d43-8f39-ea3131aaf6db", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.198", "type": 
"fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d9c1bd4c77004c3cb8e42232cad1896c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a8c8175-1197-4f12-baac-ef6aba95f585", "external-id": "nsx-vlan-transportzone-832", "segmentation_id": 832, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3776ca95-bf", "ovs_interfaceid": "3776ca95-bf9f-41e2-8b74-e2fdcbeb4e4f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 965.211429] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3a:2d:27', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1a8c8175-1197-4f12-baac-ef6aba95f585', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3776ca95-bf9f-41e2-8b74-e2fdcbeb4e4f', 'vif_model': 'vmxnet3'}] {{(pid=61649) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 965.218728] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Creating folder: Project (611107ae6fd34f229748efbb07ee5440). Parent ref: group-v51588. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 965.219304] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-573c3d32-c499-41f2-8109-c13fc31cb40e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.230847] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Created folder: Project (611107ae6fd34f229748efbb07ee5440) in parent group-v51588. [ 965.231050] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Creating folder: Instances. Parent ref: group-v51642. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 965.231301] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0c984a05-51ed-4f90-b88e-429b223e4f6a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.239243] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Created folder: Instances in parent group-v51642. [ 965.239470] env[61649]: DEBUG oslo.service.loopingcall [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 965.239648] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Creating VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 965.239836] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-20a30ff4-2507-4ce2-a585-b64698707b3a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.259263] env[61649]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 965.259263] env[61649]: value = "task-158169" [ 965.259263] env[61649]: _type = "Task" [ 965.259263] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.266603] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158169, 'name': CreateVM_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.772668] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158169, 'name': CreateVM_Task, 'duration_secs': 0.279408} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.772668] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Created VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 965.772668] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 965.772668] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.772668] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 965.773018] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f2cf77d-638c-4c94-8079-d41a1994c3bd {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.774955] env[61649]: DEBUG oslo_vmware.api [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Waiting for the task: (returnval){ [ 965.774955] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52e17fc5-8003-338a-5707-6e933736ae5a" [ 965.774955] env[61649]: _type = "Task" [ 965.774955] env[61649]: } to complete. 
{{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.788288] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 965.788842] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Processing image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 965.789234] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 966.820159] env[61649]: DEBUG nova.compute.manager [req-9f6b81f9-1be9-4474-a7c7-7ab1756db207 req-12a94819-1859-4db8-80b3-230ea262c6b8 service nova] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Received event network-changed-3776ca95-bf9f-41e2-8b74-e2fdcbeb4e4f {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 966.820386] env[61649]: DEBUG nova.compute.manager [req-9f6b81f9-1be9-4474-a7c7-7ab1756db207 req-12a94819-1859-4db8-80b3-230ea262c6b8 service nova] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Refreshing instance network info cache due to event network-changed-3776ca95-bf9f-41e2-8b74-e2fdcbeb4e4f. 
{{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 966.820560] env[61649]: DEBUG oslo_concurrency.lockutils [req-9f6b81f9-1be9-4474-a7c7-7ab1756db207 req-12a94819-1859-4db8-80b3-230ea262c6b8 service nova] Acquiring lock "refresh_cache-6ab197e9-3e38-4b37-b625-c30b6977261a" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 966.820702] env[61649]: DEBUG oslo_concurrency.lockutils [req-9f6b81f9-1be9-4474-a7c7-7ab1756db207 req-12a94819-1859-4db8-80b3-230ea262c6b8 service nova] Acquired lock "refresh_cache-6ab197e9-3e38-4b37-b625-c30b6977261a" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.820922] env[61649]: DEBUG nova.network.neutron [req-9f6b81f9-1be9-4474-a7c7-7ab1756db207 req-12a94819-1859-4db8-80b3-230ea262c6b8 service nova] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Refreshing network info cache for port 3776ca95-bf9f-41e2-8b74-e2fdcbeb4e4f {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 966.821417] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-9f6b81f9-1be9-4474-a7c7-7ab1756db207 req-12a94819-1859-4db8-80b3-230ea262c6b8 service nova] Expecting reply to msg f13130dc6e3441d3ae8fb677a5446835 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 966.828781] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f13130dc6e3441d3ae8fb677a5446835 [ 967.248467] env[61649]: DEBUG nova.network.neutron [req-9f6b81f9-1be9-4474-a7c7-7ab1756db207 req-12a94819-1859-4db8-80b3-230ea262c6b8 service nova] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Updated VIF entry in instance network info cache for port 3776ca95-bf9f-41e2-8b74-e2fdcbeb4e4f. 
{{(pid=61649) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 967.248775] env[61649]: DEBUG nova.network.neutron [req-9f6b81f9-1be9-4474-a7c7-7ab1756db207 req-12a94819-1859-4db8-80b3-230ea262c6b8 service nova] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Updating instance_info_cache with network_info: [{"id": "3776ca95-bf9f-41e2-8b74-e2fdcbeb4e4f", "address": "fa:16:3e:3a:2d:27", "network": {"id": "b96f19dc-5248-4d43-8f39-ea3131aaf6db", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.198", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d9c1bd4c77004c3cb8e42232cad1896c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a8c8175-1197-4f12-baac-ef6aba95f585", "external-id": "nsx-vlan-transportzone-832", "segmentation_id": 832, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3776ca95-bf", "ovs_interfaceid": "3776ca95-bf9f-41e2-8b74-e2fdcbeb4e4f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.249783] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-9f6b81f9-1be9-4474-a7c7-7ab1756db207 req-12a94819-1859-4db8-80b3-230ea262c6b8 service nova] Expecting reply to msg 8dbd5ed525e2408e9746352ce9e79a49 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 967.257455] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8dbd5ed525e2408e9746352ce9e79a49 [ 967.258059] env[61649]: DEBUG oslo_concurrency.lockutils [req-9f6b81f9-1be9-4474-a7c7-7ab1756db207 req-12a94819-1859-4db8-80b3-230ea262c6b8 service nova] Releasing lock "refresh_cache-6ab197e9-3e38-4b37-b625-c30b6977261a" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 970.896470] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquiring lock "5dc4bde6-db61-47c2-a2b8-d2a5515b1525" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 970.896757] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "5dc4bde6-db61-47c2-a2b8-d2a5515b1525" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 974.409548] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-aaf3d23c-6358-46ed-a5d2-ae2ae86e790f tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Expecting reply to msg 14a4e1f365354ee58372b82ec3fb3605 in queue
reply_17c3d98394d943e0a538ced2a50ef815 [ 974.418483] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 14a4e1f365354ee58372b82ec3fb3605 [ 974.418960] env[61649]: DEBUG oslo_concurrency.lockutils [None req-aaf3d23c-6358-46ed-a5d2-ae2ae86e790f tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Acquiring lock "bf2399eb-b2df-43b3-bddd-48692825c40a" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 979.953796] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-ee196c32-839a-4304-a799-032140bc8a2c tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Expecting reply to msg ac345cf28e7b45a3b1281c9dab66f169 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 979.971836] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ac345cf28e7b45a3b1281c9dab66f169 [ 979.972405] env[61649]: DEBUG oslo_concurrency.lockutils [None req-ee196c32-839a-4304-a799-032140bc8a2c tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Acquiring lock "99f9912a-edf0-40f5-a7ce-55767081705b" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 980.973028] env[61649]: DEBUG oslo_concurrency.lockutils [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Acquiring lock "5730229a-fd0c-4df1-9059-cd6ed39e954c" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 980.973306] env[61649]: DEBUG oslo_concurrency.lockutils [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Lock "5730229a-fd0c-4df1-9059-cd6ed39e954c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 983.674692] env[61649]: DEBUG oslo_concurrency.lockutils [None req-563dd667-8567-4ddb-b349-5bc02934cb87 tempest-ServerPasswordTestJSON-936204548 tempest-ServerPasswordTestJSON-936204548-project-member] Acquiring lock "0534f500-d8d8-4aad-896c-c965778c3a6f" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 983.674982] env[61649]: DEBUG oslo_concurrency.lockutils [None req-563dd667-8567-4ddb-b349-5bc02934cb87 tempest-ServerPasswordTestJSON-936204548 tempest-ServerPasswordTestJSON-936204548-project-member] Lock "0534f500-d8d8-4aad-896c-c965778c3a6f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 984.124050] env[61649]: DEBUG oslo_concurrency.lockutils [None req-1284b3d6-4ce1-49ec-9c60-72e78b810975
tempest-ServerShowV257Test-221320248 tempest-ServerShowV257Test-221320248-project-member] Acquiring lock "6d3ee887-6b6b-4199-aea6-f0de0153e5c5" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 984.124341] env[61649]: DEBUG oslo_concurrency.lockutils [None req-1284b3d6-4ce1-49ec-9c60-72e78b810975 tempest-ServerShowV257Test-221320248 tempest-ServerShowV257Test-221320248-project-member] Lock "6d3ee887-6b6b-4199-aea6-f0de0153e5c5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 985.805429] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-0b3639e2-e033-47ea-8483-3cbcdc0292fb tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Expecting reply to msg e4f69a78980f46e4b11a0188c191fab9 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 985.815452] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e4f69a78980f46e4b11a0188c191fab9 [ 985.815830] env[61649]: DEBUG oslo_concurrency.lockutils [None req-0b3639e2-e033-47ea-8483-3cbcdc0292fb tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Acquiring lock "6ab197e9-3e38-4b37-b625-c30b6977261a" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1005.754501] env[61649]: DEBUG oslo_concurrency.lockutils [None req-e375d8c7-cc0d-4ff5-adeb-759e225a88eb tempest-ServerShowV247Test-277858123 tempest-ServerShowV247Test-277858123-project-member] Acquiring lock "82ae439e-5fe0-4bed-b550-e34929c139f8" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1005.754816] env[61649]: DEBUG oslo_concurrency.lockutils [None req-e375d8c7-cc0d-4ff5-adeb-759e225a88eb tempest-ServerShowV247Test-277858123 tempest-ServerShowV247Test-277858123-project-member] Lock "82ae439e-5fe0-4bed-b550-e34929c139f8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1005.766908] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9dce6ced-8bdf-4805-8d98-3c93d9ce4e85 tempest-ServersTestBootFromVolume-1834891323 tempest-ServersTestBootFromVolume-1834891323-project-member] Acquiring lock "40b216ba-3afd-4cfe-b98b-c5de03501317" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1005.767133] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9dce6ced-8bdf-4805-8d98-3c93d9ce4e85 tempest-ServersTestBootFromVolume-1834891323 tempest-ServersTestBootFromVolume-1834891323-project-member] Lock "40b216ba-3afd-4cfe-b98b-c5de03501317" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1005.928837] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1006.372986] env[61649]: DEBUG oslo_concurrency.lockutils [None req-34b27cec-087a-43cd-a10e-57a88306aba8 tempest-ServerShowV247Test-277858123 tempest-ServerShowV247Test-277858123-project-member] Acquiring lock "a12bff02-f7da-43a1-b614-beb3d6908e0b" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1006.372986] env[61649]: DEBUG oslo_concurrency.lockutils [None req-34b27cec-087a-43cd-a10e-57a88306aba8 tempest-ServerShowV247Test-277858123 tempest-ServerShowV247Test-277858123-project-member] Lock "a12bff02-f7da-43a1-b614-beb3d6908e0b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1006.924492] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1006.924927] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 6239eccdd8f5431eb10b5a6cd89eba2c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1006.943732] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6239eccdd8f5431eb10b5a6cd89eba2c [ 1007.928642] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1007.928896] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1008.813706] env[61649]: DEBUG oslo_concurrency.lockutils [None req-22910028-f533-4462-af14-ae913b898e19 tempest-ServersTestMultiNic-1885917485 tempest-ServersTestMultiNic-1885917485-project-member] Acquiring lock "2b4cc40e-a6fc-48df-baaf-f74352c24408" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1008.813706] env[61649]: DEBUG oslo_concurrency.lockutils [None req-22910028-f533-4462-af14-ae913b898e19 tempest-ServersTestMultiNic-1885917485 tempest-ServersTestMultiNic-1885917485-project-member] Lock "2b4cc40e-a6fc-48df-baaf-f74352c24408" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1008.928750] env[61649]: DEBUG
oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1008.929227] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Starting heal instance info cache {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 1008.929227] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Rebuilding the list of instances to heal {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 1008.929715] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 95e84681088e4583aab6b1cf527883f2 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1008.955861] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 95e84681088e4583aab6b1cf527883f2 [ 1008.958122] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 00931111-13a1-447d-a401-943221badd59] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1008.958311] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1008.958449] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1008.958570] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1008.958764] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1008.958819] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1008.958914] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1008.959030] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Skipping network cache update for instance because it is Building. 
{{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1008.959153] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1008.959297] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1008.959411] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Didn't find any instances for network info cache update. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 1008.959913] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1008.960181] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1008.960314] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61649) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 1008.960498] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1008.960830] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 60c85a9694d14c4e9c79bb1454972555 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1008.969488] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 60c85a9694d14c4e9c79bb1454972555 [ 1008.970692] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1008.970897] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1008.971070] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1008.971221] env[61649]: DEBUG 
nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61649) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1008.972641] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f254bb2-f7bb-4494-9538-574d07315aa9 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.981718] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aace0748-8b43-491f-8847-92e7b4a1378d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.996255] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad8eacea-3b3b-4c22-896c-31b4d43560fb {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.003123] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-225beda3-5836-4ff5-8789-3eceec2c732c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.033780] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181751MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61649) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1009.034098] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1009.034422] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1009.035334] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg ff806a4f0e664f8c951a533267ccac16 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1009.069120] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ff806a4f0e664f8c951a533267ccac16 [ 1009.073906] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 5579f0dfcc6d423791a123b55b5ee733 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1009.082648] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5579f0dfcc6d423791a123b55b5ee733 [ 1009.104372] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 00931111-13a1-447d-a401-943221badd59 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1009.104372] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 99f9912a-edf0-40f5-a7ce-55767081705b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1009.104372] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 7f9f2074-6822-4d9d-9791-4bebc7e55862 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1009.104372] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance bf8c692f-6510-4548-aedd-0e1792512e20 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1009.104642] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance e5fe92cf-e150-419f-a164-a98a9d24dd8c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1009.104642] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 95426048-d403-4dad-9ad7-b76de655a319 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1009.104642] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance a0db1e96-4ca4-4fed-b86b-d8457f3570a9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1009.104737] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 29f84900-0805-4ab2-af4d-bd7be2ac94d3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1009.104852] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance bf2399eb-b2df-43b3-bddd-48692825c40a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1009.104944] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 6ab197e9-3e38-4b37-b625-c30b6977261a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1009.105577] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg f3eee4e6d9d844eab291a1480aa9c893 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1009.119697] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f3eee4e6d9d844eab291a1480aa9c893 [ 1009.120553] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance c9fe1bfe-e813-43e9-9668-b813416ee27b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1009.121051] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 95dba1cf70654cfdb599289acc1b9c97 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1009.131698] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 95dba1cf70654cfdb599289acc1b9c97 [ 1009.132464] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 7e77db18-077d-4665-ad90-c4e5f470716c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1009.132958] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg ea027f04da4b46728d9a12d5af0d3655 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1009.146985] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ea027f04da4b46728d9a12d5af0d3655 [ 1009.147700] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 6efdc98b-e32d-4313-b13f-95c3d4911823 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1009.148209] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 0abc772c00ba4d3489393c120ce4c25c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1009.157510] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0abc772c00ba4d3489393c120ce4c25c [ 1009.158202] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance f51cfd74-25e5-4077-9b43-8cb38fe051f8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1009.158701] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg ab661c5ec78e403eb393892da76ea499 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1009.168158] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ab661c5ec78e403eb393892da76ea499 [ 1009.168830] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance db7db1c9-6716-4591-b669-b85dd595a3e9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1009.169325] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg f6cc7ed68a664daf87f7b96d003cdce7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1009.178327] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f6cc7ed68a664daf87f7b96d003cdce7 [ 1009.178971] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 899db5f5-4963-4f7a-97d2-9c2dfd7a6981 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1009.179460] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 2c7de5194e7446afa60d0bad04e2fa3a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1009.189066] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2c7de5194e7446afa60d0bad04e2fa3a [ 1009.189762] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 7de3e5cd-94ee-4a80-8baf-17ccfed9d8c8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1009.190244] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg ebd20abe66b24cf699db1c4b8ae7a3dc in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1009.200705] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ebd20abe66b24cf699db1c4b8ae7a3dc [ 1009.201373] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 9cdd96c2-2837-4cb3-855c-ecad727dd5d4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1009.201835] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg a2c155c1d23d42989046d8494fde962f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1009.212501] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a2c155c1d23d42989046d8494fde962f [ 1009.213167] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 3e1127c5-876f-47b9-b652-7a558711a1a7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1009.213633] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg d0f6db6e9a3b4ef78352856c370ed978 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1009.222668] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d0f6db6e9a3b4ef78352856c370ed978 [ 1009.223326] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 5dc4bde6-db61-47c2-a2b8-d2a5515b1525 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1009.223783] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg d4de0239f845459fb5fa49912e9463fb in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1009.233124] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d4de0239f845459fb5fa49912e9463fb [ 1009.234004] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 5730229a-fd0c-4df1-9059-cd6ed39e954c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1009.234347] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 0dabb4d59a484238ba6d26abb3541468 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1009.244744] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0dabb4d59a484238ba6d26abb3541468 [ 1009.245446] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 0534f500-d8d8-4aad-896c-c965778c3a6f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1009.245939] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 9104945e26134290bf75641714579343 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1009.254997] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9104945e26134290bf75641714579343 [ 1009.255624] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 6d3ee887-6b6b-4199-aea6-f0de0153e5c5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1009.256106] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 10a114af30684ad9a739d8491e3495c7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1009.266129] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 10a114af30684ad9a739d8491e3495c7 [ 1009.266765] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 82ae439e-5fe0-4bed-b550-e34929c139f8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1009.267201] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 04d6fac4ed464bc38c627b6d4dfcc574 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1009.276090] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 04d6fac4ed464bc38c627b6d4dfcc574 [ 1009.276769] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 40b216ba-3afd-4cfe-b98b-c5de03501317 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1009.277216] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg ee5de56a34394f31a8d65121a5cd80c5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1009.285821] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ee5de56a34394f31a8d65121a5cd80c5 [ 1009.286413] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance a12bff02-f7da-43a1-b614-beb3d6908e0b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1009.286834] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 9b32614990854872a7a5a58a0ac44fd2 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1009.295542] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9b32614990854872a7a5a58a0ac44fd2 [ 1009.296433] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 2b4cc40e-a6fc-48df-baaf-f74352c24408 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1009.296670] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1009.296817] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1009.654508] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4987a92-7866-4975-bea5-edf3fbd2d74f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.662385] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c5169b8-ca53-40b8-b378-5420440e9f0a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.699645] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd5bfffd-9009-4b5d-aadd-ca7d27fa5c3e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.707596] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c70c79ef-42a6-4a2f-88f6-4ded4df96c15 {{(pid=61649) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.721715] env[61649]: DEBUG nova.compute.provider_tree [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1009.722272] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg bc7341f2f31e4050aa0fa33dc2c363cb in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1009.730007] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bc7341f2f31e4050aa0fa33dc2c363cb [ 1009.731053] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1009.733760] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg c8193d14850e4fe9ab8d586996e5756a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1009.748280] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c8193d14850e4fe9ab8d586996e5756a [ 1009.749020] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61649) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1009.749298] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.715s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1009.997054] env[61649]: WARNING oslo_vmware.rw_handles [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1009.997054] env[61649]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1009.997054] env[61649]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1009.997054] env[61649]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1009.997054] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1009.997054] env[61649]: ERROR oslo_vmware.rw_handles response.begin() [ 1009.997054] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1009.997054] env[61649]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1009.997054] env[61649]: 
ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1009.997054] env[61649]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1009.997054] env[61649]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1009.997054] env[61649]: ERROR oslo_vmware.rw_handles [ 1009.997715] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] [instance: 00931111-13a1-447d-a401-943221badd59] Downloaded image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to vmware_temp/bccbc138-28fa-4cb9-ab5d-ea09bdee4584/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1009.999591] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] [instance: 00931111-13a1-447d-a401-943221badd59] Caching image {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1009.999856] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Copying Virtual Disk [datastore1] vmware_temp/bccbc138-28fa-4cb9-ab5d-ea09bdee4584/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk to [datastore1] vmware_temp/bccbc138-28fa-4cb9-ab5d-ea09bdee4584/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk {{(pid=61649) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1010.000197] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-763304ec-208a-4296-a6d0-13d03211d484 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.008794] env[61649]: DEBUG oslo_vmware.api [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Waiting for the task: (returnval){ [ 1010.008794] env[61649]: value = "task-158180" [ 1010.008794] env[61649]: _type = "Task" [ 1010.008794] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.016938] env[61649]: DEBUG oslo_vmware.api [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Task: {'id': task-158180, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.518922] env[61649]: DEBUG oslo_vmware.exceptions [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Fault InvalidArgument not matched. 
{{(pid=61649) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 1010.519228] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1010.519824] env[61649]: ERROR nova.compute.manager [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] [instance: 00931111-13a1-447d-a401-943221badd59] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1010.519824] env[61649]: Faults: ['InvalidArgument']
[ 1010.519824] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59] Traceback (most recent call last):
[ 1010.519824] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59]   File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources
[ 1010.519824] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59]     yield resources
[ 1010.519824] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59]   File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1010.519824] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59]     self.driver.spawn(context, instance, image_meta,
[ 1010.519824] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1010.519824] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1010.519824] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1010.519824] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59]     self._fetch_image_if_missing(context, vi)
[ 1010.519824] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1010.520189] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59]     image_cache(vi, tmp_image_ds_loc)
[ 1010.520189] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1010.520189] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59]     vm_util.copy_virtual_disk(
[ 1010.520189] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59]   File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1010.520189] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59]     session._wait_for_task(vmdk_copy_task)
[ 1010.520189] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59]   File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1010.520189] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59]     return self.wait_for_task(task_ref)
[ 1010.520189] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1010.520189] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59]     return evt.wait()
[ 1010.520189] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1010.520189] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59]     result = hub.switch()
[ 1010.520189] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1010.520189] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59]     return self.greenlet.switch()
[ 1010.520538] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1010.520538] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59]     self.f(*self.args, **self.kw)
[ 1010.520538] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1010.520538] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59]     raise exceptions.translate_fault(task_info.error)
[ 1010.520538] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1010.520538] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59] Faults: ['InvalidArgument']
[ 1010.520538] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59]
[ 1010.520538] env[61649]: INFO nova.compute.manager [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] [instance: 00931111-13a1-447d-a401-943221badd59] Terminating instance
[ 1010.521750] env[61649]: DEBUG oslo_concurrency.lockutils [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1010.521956] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1010.522548] env[61649]: DEBUG nova.compute.manager [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] [instance: 00931111-13a1-447d-a401-943221badd59] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 1010.522739] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] [instance: 00931111-13a1-447d-a401-943221badd59] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1010.522962] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e0110bc1-2c95-40f3-90d3-c69065c92243 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1010.525193] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b47701a3-6804-4228-b21d-8c9c02e72118 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1010.533030] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] [instance: 00931111-13a1-447d-a401-943221badd59] Unregistering the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 1010.533269] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7cf96121-432d-4e9a-9595-5cf751c5f375 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1010.535407] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1010.535621] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61649) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 1010.536632] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-53f7fbc0-5cdb-404e-83b6-937ccc498177 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1010.541789] env[61649]: DEBUG oslo_vmware.api [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Waiting for the task: (returnval){
[ 1010.541789] env[61649]:         value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52a2edb8-1f58-9741-03d5-504fd6a143af"
[ 1010.541789] env[61649]:         _type = "Task"
[ 1010.541789] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1010.548722] env[61649]: DEBUG oslo_vmware.api [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52a2edb8-1f58-9741-03d5-504fd6a143af, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1010.590916] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] [instance: 00931111-13a1-447d-a401-943221badd59] Unregistered the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 1010.591134] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] [instance: 00931111-13a1-447d-a401-943221badd59] Deleting contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 1010.591319] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Deleting the datastore file [datastore1] 00931111-13a1-447d-a401-943221badd59 {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1010.591583] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-be3933ac-93bf-471b-b120-d256516d58f0 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1010.597409] env[61649]: DEBUG oslo_vmware.api [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Waiting for the task: (returnval){
[ 1010.597409] env[61649]:         value = "task-158182"
[ 1010.597409] env[61649]:         _type = "Task"
[ 1010.597409] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1010.604737] env[61649]: DEBUG oslo_vmware.api [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Task: {'id': task-158182, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1010.718813] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1010.718813] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1010.978841] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 6c848629795b4764b05262174324aacc in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1010.987938] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6c848629795b4764b05262174324aacc
[ 1011.051550] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Preparing fetch location {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 1011.051908] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Creating directory with path [datastore1] vmware_temp/b23d2035-9350-4ea4-8465-7c0b8f853431/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1011.052228] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d700b7e1-c36f-457a-a0ce-ac69485ae0df {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1011.063890] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Created directory with path [datastore1] vmware_temp/b23d2035-9350-4ea4-8465-7c0b8f853431/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1011.064191] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Fetch image to [datastore1] vmware_temp/b23d2035-9350-4ea4-8465-7c0b8f853431/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 1011.064431] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to [datastore1] vmware_temp/b23d2035-9350-4ea4-8465-7c0b8f853431/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 1011.065239] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25a94067-03b2-4a7a-a76b-df6777cce10f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1011.072923] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa016620-de05-46ba-ad5c-bfd6732172a0 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1011.080897] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8ae37b7-8afb-494f-b0e0-ee644d61dede {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1011.113781] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07bc5190-3340-466d-8d12-e2004e8fc9f2 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1011.120992] env[61649]: DEBUG oslo_vmware.api [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Task: {'id': task-158182, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.066006} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1011.122482] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Deleted the datastore file {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1011.122735] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] [instance: 00931111-13a1-447d-a401-943221badd59] Deleted contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}}
[ 1011.122970] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] [instance: 00931111-13a1-447d-a401-943221badd59] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1011.123202] env[61649]: INFO nova.compute.manager [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] [instance: 00931111-13a1-447d-a401-943221badd59] Took 0.60 seconds to destroy the instance on the hypervisor.
[ 1011.124954] env[61649]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-4398ac47-ce25-49b6-9658-ccffd944754a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1011.126833] env[61649]: DEBUG nova.compute.claims [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] [instance: 00931111-13a1-447d-a401-943221badd59] Aborting claim: {{(pid=61649) abort /opt/stack/nova/nova/compute/claims.py:84}}
[ 1011.127069] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1011.127352] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1011.129198] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Expecting reply to msg 4f561bb57eec4f958598e20b337eb63d in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1011.150047] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 1011.167931] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4f561bb57eec4f958598e20b337eb63d
[ 1011.207485] env[61649]: DEBUG oslo_vmware.rw_handles [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b23d2035-9350-4ea4-8465-7c0b8f853431/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 1011.269933] env[61649]: DEBUG oslo_vmware.rw_handles [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Completed reading data from the image iterator. {{(pid=61649) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 1011.270525] env[61649]: DEBUG oslo_vmware.rw_handles [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b23d2035-9350-4ea4-8465-7c0b8f853431/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 1011.541033] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a04fd878-b383-499b-9274-205d60a12bd5 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1011.548105] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b991985-a1d9-4322-ac08-44bcd2b51a22 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1011.577138] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e1ec813-fc45-4092-a374-d8375a70a5d1 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1011.585312] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a70326c-aac1-4f14-9606-495286b3d190 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1011.599371] env[61649]: DEBUG nova.compute.provider_tree [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1011.599870] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Expecting reply to msg 48aeda4d86ea467c8c7a713c9301a40e in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1011.607013] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 48aeda4d86ea467c8c7a713c9301a40e
[ 1011.607878] env[61649]: DEBUG nova.scheduler.client.report [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1011.610102] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Expecting reply to msg 889d9e39fc8b4c53bcc75dcb7c7ecf77 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1011.621668] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 889d9e39fc8b4c53bcc75dcb7c7ecf77
[ 1011.622327] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.495s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1011.622857] env[61649]: ERROR nova.compute.manager [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] [instance: 00931111-13a1-447d-a401-943221badd59] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1011.622857] env[61649]: Faults: ['InvalidArgument']
[ 1011.622857] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59] Traceback (most recent call last):
[ 1011.622857] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59]   File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1011.622857] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59]     self.driver.spawn(context, instance, image_meta,
[ 1011.622857] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1011.622857] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1011.622857] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1011.622857] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59]     self._fetch_image_if_missing(context, vi)
[ 1011.622857] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1011.622857] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59]     image_cache(vi, tmp_image_ds_loc)
[ 1011.622857] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1011.623158] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59]     vm_util.copy_virtual_disk(
[ 1011.623158] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59]   File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1011.623158] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59]     session._wait_for_task(vmdk_copy_task)
[ 1011.623158] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59]   File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1011.623158] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59]     return self.wait_for_task(task_ref)
[ 1011.623158] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1011.623158] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59]     return evt.wait()
[ 1011.623158] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1011.623158] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59]     result = hub.switch()
[ 1011.623158] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1011.623158] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59]     return self.greenlet.switch()
[ 1011.623158] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1011.623158] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59]     self.f(*self.args, **self.kw)
[ 1011.623459] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1011.623459] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59]     raise exceptions.translate_fault(task_info.error)
[ 1011.623459] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1011.623459] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59] Faults: ['InvalidArgument']
[ 1011.623459] env[61649]: ERROR nova.compute.manager [instance: 00931111-13a1-447d-a401-943221badd59]
[ 1011.623578] env[61649]: DEBUG nova.compute.utils [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] [instance: 00931111-13a1-447d-a401-943221badd59] VimFaultException {{(pid=61649) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1011.624851] env[61649]: DEBUG nova.compute.manager [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] [instance: 00931111-13a1-447d-a401-943221badd59] Build of instance 00931111-13a1-447d-a401-943221badd59 was re-scheduled: A specified parameter was not correct: fileType
[ 1011.624851] env[61649]: Faults: ['InvalidArgument'] {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 1011.625223] env[61649]: DEBUG nova.compute.manager [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] [instance: 00931111-13a1-447d-a401-943221badd59] Unplugging VIFs for instance {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 1011.625398] env[61649]: DEBUG nova.compute.manager [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}}
[ 1011.625566] env[61649]: DEBUG nova.compute.manager [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] [instance: 00931111-13a1-447d-a401-943221badd59] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 1011.625726] env[61649]: DEBUG nova.network.neutron [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] [instance: 00931111-13a1-447d-a401-943221badd59] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}}
[ 1011.915405] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Expecting reply to msg 96f32322286542a3b2720aae3781847a in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1011.924046] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 96f32322286542a3b2720aae3781847a
[ 1011.924647] env[61649]: DEBUG nova.network.neutron [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] [instance: 00931111-13a1-447d-a401-943221badd59] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1011.925151] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Expecting reply to msg 4a6705603a8c4f3ea05e1b55dcc0509b in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1011.934488] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4a6705603a8c4f3ea05e1b55dcc0509b
[ 1011.935073] env[61649]: INFO nova.compute.manager [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] [instance: 00931111-13a1-447d-a401-943221badd59] Took 0.31 seconds to deallocate network for instance.
[ 1011.936765] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Expecting reply to msg bbfe11c1d1014595ab96870ec9b237f6 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1011.969902] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bbfe11c1d1014595ab96870ec9b237f6
[ 1011.971120] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Expecting reply to msg cf316d87de55452b9f196113796bb2bb in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1012.001138] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cf316d87de55452b9f196113796bb2bb
[ 1012.019956] env[61649]: INFO nova.scheduler.client.report [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Deleted allocations for instance 00931111-13a1-447d-a401-943221badd59
[ 1012.030622] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Expecting reply to msg 56c7ecaa806f4051a14da519f76b56ce in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1012.040879] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 56c7ecaa806f4051a14da519f76b56ce
[ 1012.041542] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d10fa353-d5f2-440c-8526-b0f317098d35 tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Lock "00931111-13a1-447d-a401-943221badd59" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 431.300s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1012.042066] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-91b516ed-70dd-4ac8-97aa-b8094a21b093 tempest-SecurityGroupsTestJSON-1426961127 tempest-SecurityGroupsTestJSON-1426961127-project-member] Expecting reply to msg 298ad58dabbf4c59b23168785b26a87f in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1012.042789] env[61649]: DEBUG oslo_concurrency.lockutils [None req-60084869-ef27-423e-811f-22216929276b tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Lock "00931111-13a1-447d-a401-943221badd59" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 232.511s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1012.042995] env[61649]: DEBUG oslo_concurrency.lockutils [None req-60084869-ef27-423e-811f-22216929276b tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Acquiring lock "00931111-13a1-447d-a401-943221badd59-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1012.043192] env[61649]: DEBUG oslo_concurrency.lockutils [None req-60084869-ef27-423e-811f-22216929276b tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Lock "00931111-13a1-447d-a401-943221badd59-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1012.043351] env[61649]: DEBUG oslo_concurrency.lockutils [None req-60084869-ef27-423e-811f-22216929276b tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Lock "00931111-13a1-447d-a401-943221badd59-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1012.045783] env[61649]: INFO nova.compute.manager [None req-60084869-ef27-423e-811f-22216929276b tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] [instance: 00931111-13a1-447d-a401-943221badd59] Terminating instance
[ 1012.047454] env[61649]: DEBUG nova.compute.manager [None req-60084869-ef27-423e-811f-22216929276b tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] [instance: 00931111-13a1-447d-a401-943221badd59] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 1012.047646] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-60084869-ef27-423e-811f-22216929276b tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] [instance: 00931111-13a1-447d-a401-943221badd59] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1012.048742] env[61649]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a98bbc85-7d5c-4f6e-a284-e1771db91561 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1012.058876] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0685d8ef-2a59-4dc3-bc6a-2d81f95d9b80 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1012.069348] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 298ad58dabbf4c59b23168785b26a87f
[ 1012.069844] env[61649]: DEBUG nova.compute.manager [None req-91b516ed-70dd-4ac8-97aa-b8094a21b093 tempest-SecurityGroupsTestJSON-1426961127 tempest-SecurityGroupsTestJSON-1426961127-project-member] [instance: d7a1c8c3-1694-4704-8414-098af751c05e] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 1012.071469] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-91b516ed-70dd-4ac8-97aa-b8094a21b093 tempest-SecurityGroupsTestJSON-1426961127 tempest-SecurityGroupsTestJSON-1426961127-project-member] Expecting reply to msg 8758750584de474e90ad2c2a20329ba9 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1012.089014] env[61649]: WARNING nova.virt.vmwareapi.vmops [None req-60084869-ef27-423e-811f-22216929276b tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] [instance: 00931111-13a1-447d-a401-943221badd59] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 00931111-13a1-447d-a401-943221badd59 could not be found.
[ 1012.089247] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-60084869-ef27-423e-811f-22216929276b tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] [instance: 00931111-13a1-447d-a401-943221badd59] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1012.089438] env[61649]: INFO nova.compute.manager [None req-60084869-ef27-423e-811f-22216929276b tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] [instance: 00931111-13a1-447d-a401-943221badd59] Took 0.04 seconds to destroy the instance on the hypervisor.
[ 1012.089684] env[61649]: DEBUG oslo.service.loopingcall [None req-60084869-ef27-423e-811f-22216929276b tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1012.089904] env[61649]: DEBUG nova.compute.manager [-] [instance: 00931111-13a1-447d-a401-943221badd59] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 1012.089999] env[61649]: DEBUG nova.network.neutron [-] [instance: 00931111-13a1-447d-a401-943221badd59] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}}
[ 1012.092456] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8758750584de474e90ad2c2a20329ba9
[ 1012.092945] env[61649]: DEBUG nova.compute.manager [None req-91b516ed-70dd-4ac8-97aa-b8094a21b093 tempest-SecurityGroupsTestJSON-1426961127 tempest-SecurityGroupsTestJSON-1426961127-project-member] [instance: d7a1c8c3-1694-4704-8414-098af751c05e] Instance disappeared before build. {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}}
[ 1012.093271] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-91b516ed-70dd-4ac8-97aa-b8094a21b093 tempest-SecurityGroupsTestJSON-1426961127 tempest-SecurityGroupsTestJSON-1426961127-project-member] Expecting reply to msg 91b07bb2f73947c080c6cb77ab8aab19 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1012.105592] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 91b07bb2f73947c080c6cb77ab8aab19
[ 1012.112358] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 3b02caa80a7b495694e8556975e2d505 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1012.116470] env[61649]: DEBUG oslo_concurrency.lockutils [None req-91b516ed-70dd-4ac8-97aa-b8094a21b093 tempest-SecurityGroupsTestJSON-1426961127 tempest-SecurityGroupsTestJSON-1426961127-project-member] Lock "d7a1c8c3-1694-4704-8414-098af751c05e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 207.127s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1012.117323] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-1e76531d-3cec-4608-9940-aba46c3e78c4 tempest-MultipleCreateTestJSON-674370190 tempest-MultipleCreateTestJSON-674370190-project-member] Expecting reply to msg cbce89c73b434b4dbaf9aa5215a79580 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1012.118854] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3b02caa80a7b495694e8556975e2d505
[ 1012.119385] env[61649]: DEBUG nova.network.neutron [-] [instance: 00931111-13a1-447d-a401-943221badd59] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1012.119854] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 9a33f57cfd5049e39a3154ac2a07dfa8 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1012.125537] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cbce89c73b434b4dbaf9aa5215a79580
[ 1012.126167] env[61649]: DEBUG nova.compute.manager [None req-1e76531d-3cec-4608-9940-aba46c3e78c4 tempest-MultipleCreateTestJSON-674370190 tempest-MultipleCreateTestJSON-674370190-project-member] [instance: e4d8cb96-182d-4b77-a8ac-dfd1bf52d484] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 1012.127990] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-1e76531d-3cec-4608-9940-aba46c3e78c4 tempest-MultipleCreateTestJSON-674370190 tempest-MultipleCreateTestJSON-674370190-project-member] Expecting reply to msg 00f0acc24f22465e9a42c72941f381c6 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1012.129321] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9a33f57cfd5049e39a3154ac2a07dfa8
[ 1012.130074] env[61649]: INFO nova.compute.manager [-] [instance: 00931111-13a1-447d-a401-943221badd59] Took 0.04 seconds to deallocate network for instance.
[ 1012.133582] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-60084869-ef27-423e-811f-22216929276b tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Expecting reply to msg d5d84a06124f41039aea3e072802e6d2 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1012.150017] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 00f0acc24f22465e9a42c72941f381c6
[ 1012.150805] env[61649]: DEBUG nova.compute.manager [None req-1e76531d-3cec-4608-9940-aba46c3e78c4 tempest-MultipleCreateTestJSON-674370190 tempest-MultipleCreateTestJSON-674370190-project-member] [instance: e4d8cb96-182d-4b77-a8ac-dfd1bf52d484] Instance disappeared before build. {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}}
[ 1012.151360] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-1e76531d-3cec-4608-9940-aba46c3e78c4 tempest-MultipleCreateTestJSON-674370190 tempest-MultipleCreateTestJSON-674370190-project-member] Expecting reply to msg 13545282183a46acbc72800e9d494089 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1012.160363] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d5d84a06124f41039aea3e072802e6d2
[ 1012.160996] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 13545282183a46acbc72800e9d494089
[ 1012.177233] env[61649]: DEBUG oslo_concurrency.lockutils [None req-1e76531d-3cec-4608-9940-aba46c3e78c4 tempest-MultipleCreateTestJSON-674370190 tempest-MultipleCreateTestJSON-674370190-project-member] Lock "e4d8cb96-182d-4b77-a8ac-dfd1bf52d484" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 205.291s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1012.179930] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-60084869-ef27-423e-811f-22216929276b tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Expecting reply to msg 96cc62090fef407ca90d6a14dca430d3 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1012.181183] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-1e76531d-3cec-4608-9940-aba46c3e78c4 tempest-MultipleCreateTestJSON-674370190 tempest-MultipleCreateTestJSON-674370190-project-member] Expecting reply to msg 822e2da017114669a29db8d0156b7791 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1012.198403] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 822e2da017114669a29db8d0156b7791
[ 1012.198971] env[61649]: DEBUG nova.compute.manager [None req-1e76531d-3cec-4608-9940-aba46c3e78c4 tempest-MultipleCreateTestJSON-674370190 tempest-MultipleCreateTestJSON-674370190-project-member] [instance: 10da1abe-1c95-44b8-a10d-ce618625b69b] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 1012.200782] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-1e76531d-3cec-4608-9940-aba46c3e78c4 tempest-MultipleCreateTestJSON-674370190 tempest-MultipleCreateTestJSON-674370190-project-member] Expecting reply to msg 68cd511bd449492d93d7f950b846f6ed in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1012.221670] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 96cc62090fef407ca90d6a14dca430d3
[ 1012.224577] env[61649]: DEBUG oslo_concurrency.lockutils [None req-60084869-ef27-423e-811f-22216929276b tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Lock "00931111-13a1-447d-a401-943221badd59" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.182s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1012.224894] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-60084869-ef27-423e-811f-22216929276b tempest-InstanceActionsNegativeTestJSON-2020899507 tempest-InstanceActionsNegativeTestJSON-2020899507-project-member] Expecting reply to msg d2e589f870214a9997a06f21fc02938c in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1012.228766] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 68cd511bd449492d93d7f950b846f6ed
[ 1012.229167] env[61649]: DEBUG nova.compute.manager [None req-1e76531d-3cec-4608-9940-aba46c3e78c4 tempest-MultipleCreateTestJSON-674370190 tempest-MultipleCreateTestJSON-674370190-project-member] [instance: 10da1abe-1c95-44b8-a10d-ce618625b69b] Instance disappeared before build. {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}}
[ 1012.229564] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-1e76531d-3cec-4608-9940-aba46c3e78c4 tempest-MultipleCreateTestJSON-674370190 tempest-MultipleCreateTestJSON-674370190-project-member] Expecting reply to msg 9713d81f3b784382acae96e0b3aaf555 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1012.240035] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d2e589f870214a9997a06f21fc02938c
[ 1012.240528] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9713d81f3b784382acae96e0b3aaf555
[ 1012.252025] env[61649]: DEBUG oslo_concurrency.lockutils [None req-1e76531d-3cec-4608-9940-aba46c3e78c4 tempest-MultipleCreateTestJSON-674370190 tempest-MultipleCreateTestJSON-674370190-project-member] Lock "10da1abe-1c95-44b8-a10d-ce618625b69b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 205.341s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1012.252345] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-3b9a8e8a-77e1-4b1e-ac66-89ff4c5432d2 tempest-ServersTestMultiNic-1885917485 tempest-ServersTestMultiNic-1885917485-project-member] Expecting reply to msg 49157a2e1d10498f88008fc80df75722 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1012.260868] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 49157a2e1d10498f88008fc80df75722
[ 1012.261294] env[61649]: DEBUG nova.compute.manager [None req-3b9a8e8a-77e1-4b1e-ac66-89ff4c5432d2 tempest-ServersTestMultiNic-1885917485 tempest-ServersTestMultiNic-1885917485-project-member] [instance: a086f03c-c993-4e1a-8a3e-efa40bb8b8bd] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 1012.263591] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-3b9a8e8a-77e1-4b1e-ac66-89ff4c5432d2 tempest-ServersTestMultiNic-1885917485 tempest-ServersTestMultiNic-1885917485-project-member] Expecting reply to msg b2b0dd53acaa4f50884458c44653f1ed in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1012.286487] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b2b0dd53acaa4f50884458c44653f1ed
[ 1012.287019] env[61649]: DEBUG nova.compute.manager [None req-3b9a8e8a-77e1-4b1e-ac66-89ff4c5432d2 tempest-ServersTestMultiNic-1885917485 tempest-ServersTestMultiNic-1885917485-project-member] [instance: a086f03c-c993-4e1a-8a3e-efa40bb8b8bd] Instance disappeared before build. {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}}
[ 1012.287397] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-3b9a8e8a-77e1-4b1e-ac66-89ff4c5432d2 tempest-ServersTestMultiNic-1885917485 tempest-ServersTestMultiNic-1885917485-project-member] Expecting reply to msg edfc610ccc174c49a01eeb46cdec3233 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1012.302659] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg edfc610ccc174c49a01eeb46cdec3233
[ 1012.313272] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3b9a8e8a-77e1-4b1e-ac66-89ff4c5432d2 tempest-ServersTestMultiNic-1885917485 tempest-ServersTestMultiNic-1885917485-project-member] Lock "a086f03c-c993-4e1a-8a3e-efa40bb8b8bd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 203.434s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1012.313963] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Expecting reply to msg 3b481875a5414da8bed13e9faa627913 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1012.321542] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3b481875a5414da8bed13e9faa627913
[ 1012.321957] env[61649]: DEBUG nova.compute.manager [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 1012.323718] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Expecting reply to msg 5107afc4f6db41f2bf8a3555d9efb3fb in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1012.354939] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5107afc4f6db41f2bf8a3555d9efb3fb
[ 1012.368329] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1012.368702] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1012.370244] env[61649]: INFO nova.compute.claims [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 1012.371946] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Expecting reply to msg 53cffa3fb7a540f5afa83e34046429e9 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1012.406857] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 53cffa3fb7a540f5afa83e34046429e9
[ 1012.408701] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Expecting reply to msg 949740b1d14b43868c3347ec907c9a80 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1012.415668] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 949740b1d14b43868c3347ec907c9a80
[ 1012.714778] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74716453-2d42-4020-b067-e0fe5ac8f76d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1012.722275] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9a601a9-5764-4af3-bdd8-21f4cb01db2f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1012.751921] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d15fb42-f8a7-44cb-b6fe-e0554f08f802 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1012.758816] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8656abc-e0c5-4c91-a723-2a55af2c7338 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1012.773066] env[61649]: DEBUG nova.compute.provider_tree [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1012.773533] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Expecting reply to msg 5ecd987f947b444b854a83a7aa834229 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1012.780915] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ecd987f947b444b854a83a7aa834229
[ 1012.781920] env[61649]: DEBUG nova.scheduler.client.report [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1012.784223] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Expecting reply to msg 0a0fb6f4393c4bd38240ae4836ce1b97 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1012.795498] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0a0fb6f4393c4bd38240ae4836ce1b97
[ 1012.795498] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.426s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1012.795498] env[61649]: DEBUG nova.compute.manager [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Start building networks asynchronously for instance. {{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}}
[ 1012.797097] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Expecting reply to msg 6037ce28ceca4ac1802148b7a960ae63 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1012.824439] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6037ce28ceca4ac1802148b7a960ae63
[ 1012.825971] env[61649]: DEBUG nova.compute.utils [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Using /dev/sd instead of None {{(pid=61649) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1012.826555] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Expecting reply to msg 47b1dd91cc4b42feaf671d94fe14c9be in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1012.827842] env[61649]: DEBUG nova.compute.manager [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Allocating IP information in the background. {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}}
[ 1012.828017] env[61649]: DEBUG nova.network.neutron [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] allocate_for_instance() {{(pid=61649) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 1012.838005] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 47b1dd91cc4b42feaf671d94fe14c9be
[ 1012.838620] env[61649]: DEBUG nova.compute.manager [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Start building block device mappings for instance. {{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}}
[ 1012.840438] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Expecting reply to msg 33330602b91e4ace81aa65fdd864b687 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1012.868811] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 33330602b91e4ace81aa65fdd864b687
[ 1012.871834] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Expecting reply to msg bdfa40a52f664442adeb59507a378cdd in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1012.874895] env[61649]: DEBUG nova.policy [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '01bad05790fd427b936b8dd276a0f314', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1d64b106d8414ca88122a5c5e037d285', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61649) authorize /opt/stack/nova/nova/policy.py:203}}
[ 1012.905139] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bdfa40a52f664442adeb59507a378cdd
[ 1012.906901] env[61649]: DEBUG nova.compute.manager [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Start spawning the instance on the hypervisor.
{{(pid=61649) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1012.927803] env[61649]: DEBUG nova.virt.hardware [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-04-02T10:09:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='2051523199',id=37,is_public=True,memory_mb=128,name='tempest-flavor_with_ephemeral_0-396759899',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-04-02T10:03:42Z,direct_url=,disk_format='vmdk',id=d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d9c1bd4c77004c3cb8e42232cad1896c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-04-02T10:03:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1012.928139] env[61649]: DEBUG nova.virt.hardware [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Flavor limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1012.928226] env[61649]: DEBUG nova.virt.hardware [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Image limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1012.928420] env[61649]: DEBUG nova.virt.hardware [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Flavor pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1012.928563] env[61649]: DEBUG nova.virt.hardware [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Image pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1012.928707] env[61649]: DEBUG nova.virt.hardware [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1012.928911] env[61649]: DEBUG nova.virt.hardware [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1012.929074] env[61649]: DEBUG nova.virt.hardware [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 
tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1012.929262] env[61649]: DEBUG nova.virt.hardware [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Got 1 possible topologies {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1012.929440] env[61649]: DEBUG nova.virt.hardware [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1012.929646] env[61649]: DEBUG nova.virt.hardware [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1012.930772] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b973559-ba95-448f-a82d-5208caf24b52 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.939936] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b809ab71-bddb-4d20-bd40-cbcfda054b6c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.195188] env[61649]: DEBUG nova.network.neutron [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Successfully created port: 0dc3b63d-12a5-41e8-a633-ccec796c3bfd {{(pid=61649) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1013.749447] env[61649]: DEBUG nova.network.neutron [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Successfully updated port: 0dc3b63d-12a5-41e8-a633-ccec796c3bfd {{(pid=61649) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1013.749972] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Expecting reply to msg 6f810e796a11422da67835169a724d4d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1013.758361] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6f810e796a11422da67835169a724d4d [ 1013.758966] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Acquiring lock "refresh_cache-c9fe1bfe-e813-43e9-9668-b813416ee27b" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1013.759099] env[61649]: DEBUG 
oslo_concurrency.lockutils [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Acquired lock "refresh_cache-c9fe1bfe-e813-43e9-9668-b813416ee27b" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1013.759247] env[61649]: DEBUG nova.network.neutron [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Building network info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1013.759619] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Expecting reply to msg 47ae9ea5f89e408ebe235dc5a208e517 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1013.766670] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 47ae9ea5f89e408ebe235dc5a208e517 [ 1013.813276] env[61649]: DEBUG nova.network.neutron [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Instance cache missing network info. {{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1013.982553] env[61649]: DEBUG nova.compute.manager [req-5a4a623f-4a85-48d8-a543-967ac3b1a5f4 req-d07b3c54-6078-41bc-8543-bdb0ac4010b3 service nova] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Received event network-vif-plugged-0dc3b63d-12a5-41e8-a633-ccec796c3bfd {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1013.982761] env[61649]: DEBUG oslo_concurrency.lockutils [req-5a4a623f-4a85-48d8-a543-967ac3b1a5f4 req-d07b3c54-6078-41bc-8543-bdb0ac4010b3 service nova] Acquiring lock "c9fe1bfe-e813-43e9-9668-b813416ee27b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1013.982957] env[61649]: DEBUG oslo_concurrency.lockutils [req-5a4a623f-4a85-48d8-a543-967ac3b1a5f4 req-d07b3c54-6078-41bc-8543-bdb0ac4010b3 service nova] Lock "c9fe1bfe-e813-43e9-9668-b813416ee27b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1013.983121] env[61649]: DEBUG oslo_concurrency.lockutils [req-5a4a623f-4a85-48d8-a543-967ac3b1a5f4 req-d07b3c54-6078-41bc-8543-bdb0ac4010b3 service nova] Lock "c9fe1bfe-e813-43e9-9668-b813416ee27b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1013.983286] env[61649]: DEBUG nova.compute.manager [req-5a4a623f-4a85-48d8-a543-967ac3b1a5f4 req-d07b3c54-6078-41bc-8543-bdb0ac4010b3 service nova] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] No waiting events found dispatching network-vif-plugged-0dc3b63d-12a5-41e8-a633-ccec796c3bfd {{(pid=61649) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:320}} [ 1013.983442] env[61649]: WARNING nova.compute.manager [req-5a4a623f-4a85-48d8-a543-967ac3b1a5f4 req-d07b3c54-6078-41bc-8543-bdb0ac4010b3 service nova] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Received unexpected event network-vif-plugged-0dc3b63d-12a5-41e8-a633-ccec796c3bfd for instance with vm_state building and task_state spawning. [ 1013.983594] env[61649]: DEBUG nova.compute.manager [req-5a4a623f-4a85-48d8-a543-967ac3b1a5f4 req-d07b3c54-6078-41bc-8543-bdb0ac4010b3 service nova] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Received event network-changed-0dc3b63d-12a5-41e8-a633-ccec796c3bfd {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1013.983741] env[61649]: DEBUG nova.compute.manager [req-5a4a623f-4a85-48d8-a543-967ac3b1a5f4 req-d07b3c54-6078-41bc-8543-bdb0ac4010b3 service nova] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Refreshing instance network info cache due to event network-changed-0dc3b63d-12a5-41e8-a633-ccec796c3bfd. {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 1013.983902] env[61649]: DEBUG oslo_concurrency.lockutils [req-5a4a623f-4a85-48d8-a543-967ac3b1a5f4 req-d07b3c54-6078-41bc-8543-bdb0ac4010b3 service nova] Acquiring lock "refresh_cache-c9fe1bfe-e813-43e9-9668-b813416ee27b" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1013.987825] env[61649]: DEBUG nova.network.neutron [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Updating instance_info_cache with network_info: [{"id": "0dc3b63d-12a5-41e8-a633-ccec796c3bfd", "address": "fa:16:3e:45:31:28", "network": {"id": "df77ad44-b9e7-4d96-955a-8626ed2c70b6", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-669366195-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d64b106d8414ca88122a5c5e037d285", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6365036-aa37-44d2-90d1-ca1c3516ded9", "external-id": "nsx-vlan-transportzone-66", "segmentation_id": 66, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0dc3b63d-12", "ovs_interfaceid": "0dc3b63d-12a5-41e8-a633-ccec796c3bfd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1013.988296] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Expecting reply to msg 735cb71119ac4f739fba3d475d422f26 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1013.997980] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 735cb71119ac4f739fba3d475d422f26 [ 
1013.998502] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Releasing lock "refresh_cache-c9fe1bfe-e813-43e9-9668-b813416ee27b" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1013.998762] env[61649]: DEBUG nova.compute.manager [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Instance network_info: |[{"id": "0dc3b63d-12a5-41e8-a633-ccec796c3bfd", "address": "fa:16:3e:45:31:28", "network": {"id": "df77ad44-b9e7-4d96-955a-8626ed2c70b6", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-669366195-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d64b106d8414ca88122a5c5e037d285", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6365036-aa37-44d2-90d1-ca1c3516ded9", "external-id": "nsx-vlan-transportzone-66", "segmentation_id": 66, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0dc3b63d-12", "ovs_interfaceid": "0dc3b63d-12a5-41e8-a633-ccec796c3bfd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1013.999015] env[61649]: DEBUG oslo_concurrency.lockutils [req-5a4a623f-4a85-48d8-a543-967ac3b1a5f4 req-d07b3c54-6078-41bc-8543-bdb0ac4010b3 service nova] Acquired lock "refresh_cache-c9fe1bfe-e813-43e9-9668-b813416ee27b" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1013.999227] env[61649]: DEBUG nova.network.neutron [req-5a4a623f-4a85-48d8-a543-967ac3b1a5f4 req-d07b3c54-6078-41bc-8543-bdb0ac4010b3 service nova] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Refreshing network info cache for port 0dc3b63d-12a5-41e8-a633-ccec796c3bfd {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 1013.999661] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-5a4a623f-4a85-48d8-a543-967ac3b1a5f4 req-d07b3c54-6078-41bc-8543-bdb0ac4010b3 service nova] Expecting reply to msg 714f2a158ed0478393aeec86cd425946 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1014.000484] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:45:31:28', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c6365036-aa37-44d2-90d1-ca1c3516ded9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0dc3b63d-12a5-41e8-a633-ccec796c3bfd', 'vif_model': 'vmxnet3'}] {{(pid=61649) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1014.007617] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Creating folder: Project (1d64b106d8414ca88122a5c5e037d285). Parent ref: group-v51588. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1014.008687] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 714f2a158ed0478393aeec86cd425946 [ 1014.009278] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eb48595a-8634-49fe-b25c-23a605e5e99e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.023591] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Created folder: Project (1d64b106d8414ca88122a5c5e037d285) in parent group-v51588. [ 1014.023790] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Creating folder: Instances. Parent ref: group-v51649. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1014.024034] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e5d8ae20-b2e5-407e-b00c-c1ec63f3a6f0 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.032631] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Created folder: Instances in parent group-v51649. [ 1014.032868] env[61649]: DEBUG oslo.service.loopingcall [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1014.033056] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Creating VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1014.033272] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0d0ea55e-9d9c-425a-ae5b-95d397357b3c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.051846] env[61649]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1014.051846] env[61649]: value = "task-158185" [ 1014.051846] env[61649]: _type = "Task" [ 1014.051846] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.061741] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158185, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.265225] env[61649]: DEBUG nova.network.neutron [req-5a4a623f-4a85-48d8-a543-967ac3b1a5f4 req-d07b3c54-6078-41bc-8543-bdb0ac4010b3 service nova] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Updated VIF entry in instance network info cache for port 0dc3b63d-12a5-41e8-a633-ccec796c3bfd. {{(pid=61649) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 1014.265658] env[61649]: DEBUG nova.network.neutron [req-5a4a623f-4a85-48d8-a543-967ac3b1a5f4 req-d07b3c54-6078-41bc-8543-bdb0ac4010b3 service nova] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Updating instance_info_cache with network_info: [{"id": "0dc3b63d-12a5-41e8-a633-ccec796c3bfd", "address": "fa:16:3e:45:31:28", "network": {"id": "df77ad44-b9e7-4d96-955a-8626ed2c70b6", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-669366195-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d64b106d8414ca88122a5c5e037d285", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6365036-aa37-44d2-90d1-ca1c3516ded9", "external-id": "nsx-vlan-transportzone-66", "segmentation_id": 66, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0dc3b63d-12", "ovs_interfaceid": "0dc3b63d-12a5-41e8-a633-ccec796c3bfd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1014.267280] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-5a4a623f-4a85-48d8-a543-967ac3b1a5f4 req-d07b3c54-6078-41bc-8543-bdb0ac4010b3 service nova] Expecting reply to msg b0d711516f574741a92e675a905a717d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1014.276204] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b0d711516f574741a92e675a905a717d [ 1014.276959] env[61649]: DEBUG oslo_concurrency.lockutils [req-5a4a623f-4a85-48d8-a543-967ac3b1a5f4 req-d07b3c54-6078-41bc-8543-bdb0ac4010b3 service nova] Releasing lock "refresh_cache-c9fe1bfe-e813-43e9-9668-b813416ee27b" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1014.563430] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158185, 'name': CreateVM_Task, 'duration_secs': 0.267246} completed successfully. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.563595] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Created VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1014.564408] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1014.564531] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1014.564905] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1014.565144] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6bbea6ab-19c1-41e9-a3a4-907c11f2290b {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.569330] env[61649]: DEBUG oslo_vmware.api [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Waiting for the task: (returnval){ [ 1014.569330] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52194c5b-799a-b5e3-1629-eec03685b7df" [ 1014.569330] env[61649]: _type = "Task" [ 1014.569330] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.576961] env[61649]: DEBUG oslo_vmware.api [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52194c5b-799a-b5e3-1629-eec03685b7df, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.632512] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-07795c89-5627-4ba4-b61b-8c17429c82b9 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Expecting reply to msg 6d1be21a0ddd4533afebd95b78600737 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1014.640957] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6d1be21a0ddd4533afebd95b78600737 [ 1014.641430] env[61649]: DEBUG oslo_concurrency.lockutils [None req-07795c89-5627-4ba4-b61b-8c17429c82b9 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Acquiring lock "c9fe1bfe-e813-43e9-9668-b813416ee27b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1015.080532] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1015.080861] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Processing image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1015.081186] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1029.316073] env[61649]: DEBUG oslo_concurrency.lockutils [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Acquiring lock "0b0050ff-2714-4068-9956-089c6aa3eff1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1029.316340] env[61649]: DEBUG oslo_concurrency.lockutils [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Lock "0b0050ff-2714-4068-9956-089c6aa3eff1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1030.423188] env[61649]: DEBUG oslo_concurrency.lockutils [None req-031c8a30-8f5f-4862-8a61-ea1c8cd9187e tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Acquiring lock 
"71c15bd8-5786-4d44-aa0e-3249b272ac72" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1030.423540] env[61649]: DEBUG oslo_concurrency.lockutils [None req-031c8a30-8f5f-4862-8a61-ea1c8cd9187e tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Lock "71c15bd8-5786-4d44-aa0e-3249b272ac72" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1032.698182] env[61649]: DEBUG oslo_concurrency.lockutils [None req-f8a9317c-5105-4093-9fbf-2cc64d022fbf tempest-ServerRescueTestJSON-1699452881 tempest-ServerRescueTestJSON-1699452881-project-member] Acquiring lock "d7adbc9f-af82-4f99-8536-4411665e3233" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1032.698482] env[61649]: DEBUG oslo_concurrency.lockutils [None req-f8a9317c-5105-4093-9fbf-2cc64d022fbf tempest-ServerRescueTestJSON-1699452881 tempest-ServerRescueTestJSON-1699452881-project-member] Lock "d7adbc9f-af82-4f99-8536-4411665e3233" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1040.103920] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4f3872f5-ec64-4477-a9da-4beaee7ecda7 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Acquiring lock "c7a35269-b314-4381-a8b4-d509d5627861" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1040.104234] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4f3872f5-ec64-4477-a9da-4beaee7ecda7 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Lock "c7a35269-b314-4381-a8b4-d509d5627861" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1060.632195] env[61649]: WARNING oslo_vmware.rw_handles [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1060.632195] env[61649]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1060.632195] env[61649]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1060.632195] env[61649]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1060.632195] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1060.632195] env[61649]: ERROR oslo_vmware.rw_handles response.begin() [ 1060.632195] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 
1060.632195] env[61649]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1060.632195] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1060.632195] env[61649]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1060.632195] env[61649]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1060.632195] env[61649]: ERROR oslo_vmware.rw_handles [ 1060.632764] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Downloaded image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to vmware_temp/b23d2035-9350-4ea4-8465-7c0b8f853431/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1060.634641] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Caching image {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1060.634896] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Copying Virtual Disk [datastore1] vmware_temp/b23d2035-9350-4ea4-8465-7c0b8f853431/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk to [datastore1] vmware_temp/b23d2035-9350-4ea4-8465-7c0b8f853431/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk {{(pid=61649) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1060.635178] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9d16b858-39fb-485a-b5a0-e5b7d1cba4ef {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.642585] env[61649]: DEBUG oslo_vmware.api [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Waiting for the task: (returnval){ [ 1060.642585] env[61649]: value = "task-158186" [ 1060.642585] env[61649]: _type = "Task" [ 1060.642585] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.650148] env[61649]: DEBUG oslo_vmware.api [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Task: {'id': task-158186, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.152718] env[61649]: DEBUG oslo_vmware.exceptions [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Fault InvalidArgument not matched. 
{{(pid=61649) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1061.153004] env[61649]: DEBUG oslo_concurrency.lockutils [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1061.153537] env[61649]: ERROR nova.compute.manager [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1061.153537] env[61649]: Faults: ['InvalidArgument'] [ 1061.153537] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Traceback (most recent call last): [ 1061.153537] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1061.153537] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] yield resources [ 1061.153537] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1061.153537] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] self.driver.spawn(context, instance, image_meta, [ 1061.153537] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1061.153537] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1061.153537] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1061.153537] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] self._fetch_image_if_missing(context, vi) [ 1061.153537] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1061.153903] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] image_cache(vi, tmp_image_ds_loc) [ 1061.153903] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1061.153903] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] vm_util.copy_virtual_disk( [ 1061.153903] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1061.153903] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] session._wait_for_task(vmdk_copy_task) [ 1061.153903] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 1061.153903] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] return self.wait_for_task(task_ref) [ 1061.153903] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1061.153903] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] return evt.wait() [ 1061.153903] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1061.153903] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] result = hub.switch() [ 1061.153903] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1061.153903] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] return self.greenlet.switch() [ 1061.154285] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1061.154285] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] self.f(*self.args, **self.kw) [ 1061.154285] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1061.154285] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] raise exceptions.translate_fault(task_info.error) [ 1061.154285] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1061.154285] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Faults: ['InvalidArgument'] [ 1061.154285] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] [ 1061.154285] env[61649]: INFO nova.compute.manager [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Terminating instance [ 1061.156446] env[61649]: DEBUG nova.compute.manager [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Start destroying the instance on the hypervisor. 
{{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1061.156640] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1061.156913] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1061.157099] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1061.157811] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d5b47e8-c117-463f-9b30-c78b3e836432 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.160352] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7a1af8b3-9b60-42b8-afed-3ea182a72d62 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.167164] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Unregistering the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1061.167406] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fde24aea-49ed-4ecd-af16-03e75195b50b {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.169482] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1061.169648] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61649) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1061.170544] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93ddf38a-0022-4fcd-a565-5e61c4e9c07a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.175251] env[61649]: DEBUG oslo_vmware.api [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Waiting for the task: (returnval){ [ 1061.175251] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]521f16ff-3a79-e376-37d3-f6d276ef446f" [ 1061.175251] env[61649]: _type = "Task" [ 1061.175251] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.186366] env[61649]: DEBUG oslo_vmware.api [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]521f16ff-3a79-e376-37d3-f6d276ef446f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.225534] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Unregistered the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1061.225735] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Deleting contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1061.225914] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Deleting the datastore file [datastore1] 99f9912a-edf0-40f5-a7ce-55767081705b {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1061.226158] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-73f3685e-71b4-4665-9176-1493a5c489bd {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.231980] env[61649]: DEBUG oslo_vmware.api [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Waiting for the task: (returnval){ [ 1061.231980] env[61649]: value = "task-158188" [ 1061.231980] env[61649]: _type = "Task" [ 1061.231980] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.239352] env[61649]: DEBUG oslo_vmware.api [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Task: {'id': task-158188, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.685208] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Preparing fetch location {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1061.685505] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Creating directory with path [datastore1] vmware_temp/1e47f526-5d6c-4f99-bd7b-cdef8642e6cb/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1061.685742] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-89491437-185e-44a3-8328-4a3d9c069d65 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.696538] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Created directory with path [datastore1] vmware_temp/1e47f526-5d6c-4f99-bd7b-cdef8642e6cb/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1061.696729] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Fetch image to [datastore1] vmware_temp/1e47f526-5d6c-4f99-bd7b-cdef8642e6cb/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1061.696897] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to [datastore1] vmware_temp/1e47f526-5d6c-4f99-bd7b-cdef8642e6cb/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1061.697575] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abb0c57f-a85f-44b8-ab68-d04c9055310c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.704566] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10c72b7c-1635-437e-915f-7706a8919a1f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.713346] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5110937-e405-44f2-825d-1f85a842ae2f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.746616] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfba2490-5ed5-4e55-8da9-1b60c6d52e90 {{(pid=61649) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.753437] env[61649]: DEBUG oslo_vmware.api [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Task: {'id': task-158188, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074191} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.754805] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Deleted the datastore file {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1061.754993] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Deleted contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1061.755178] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1061.755353] env[61649]: INFO nova.compute.manager [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1061.757073] env[61649]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-4b655bcb-b005-466c-9cf5-6dd98395cdcd {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.758899] env[61649]: DEBUG nova.compute.claims [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Aborting claim: {{(pid=61649) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1061.759063] env[61649]: DEBUG oslo_concurrency.lockutils [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1061.759288] env[61649]: DEBUG oslo_concurrency.lockutils [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1061.761157] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Expecting reply to msg 3e82faba2e254b5981dff41c9d3f8a27 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1061.786219] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1061.792330] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3e82faba2e254b5981dff41c9d3f8a27 [ 1061.833637] env[61649]: DEBUG oslo_vmware.rw_handles [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1e47f526-5d6c-4f99-bd7b-cdef8642e6cb/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1061.897773] env[61649]: DEBUG oslo_vmware.rw_handles [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Completed reading data from the image iterator. 
{{(pid=61649) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1061.898028] env[61649]: DEBUG oslo_vmware.rw_handles [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1e47f526-5d6c-4f99-bd7b-cdef8642e6cb/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1062.114090] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06fee3e5-1ea7-4f31-bdad-dafee56a503a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.121670] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4cac39f-9191-4d8e-8a19-897071d2a967 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.151949] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aae04673-5565-4a20-8a83-e57a7b9bff0c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.158923] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72aff8ec-eb0b-41a6-a385-2d589f1feaba {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.172081] env[61649]: DEBUG nova.compute.provider_tree [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1062.172664] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Expecting reply to msg 963fc21668b84a1d8a7d105ad1b5dce0 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1062.180426] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 963fc21668b84a1d8a7d105ad1b5dce0 [ 1062.181562] env[61649]: DEBUG nova.scheduler.client.report [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1062.183784] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Expecting reply to msg 
971b964ff1f540aea305b070ef356217 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1062.197518] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 971b964ff1f540aea305b070ef356217 [ 1062.198289] env[61649]: DEBUG oslo_concurrency.lockutils [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.439s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1062.198868] env[61649]: ERROR nova.compute.manager [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1062.198868] env[61649]: Faults: ['InvalidArgument'] [ 1062.198868] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Traceback (most recent call last): [ 1062.198868] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1062.198868] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] self.driver.spawn(context, instance, image_meta, [ 1062.198868] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1062.198868] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1062.198868] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1062.198868] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] self._fetch_image_if_missing(context, vi) [ 1062.198868] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1062.198868] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] image_cache(vi, tmp_image_ds_loc) [ 1062.198868] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1062.199216] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] vm_util.copy_virtual_disk( [ 1062.199216] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1062.199216] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] session._wait_for_task(vmdk_copy_task) [ 1062.199216] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1062.199216] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] return self.wait_for_task(task_ref) [ 1062.199216] env[61649]: ERROR 
nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1062.199216] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] return evt.wait() [ 1062.199216] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1062.199216] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] result = hub.switch() [ 1062.199216] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1062.199216] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] return self.greenlet.switch() [ 1062.199216] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1062.199216] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] self.f(*self.args, **self.kw) [ 1062.199563] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1062.199563] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] raise exceptions.translate_fault(task_info.error) [ 1062.199563] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1062.199563] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Faults: ['InvalidArgument'] [ 1062.199563] env[61649]: ERROR nova.compute.manager [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] [ 1062.200128] env[61649]: DEBUG nova.compute.utils [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] VimFaultException {{(pid=61649) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1062.201661] env[61649]: DEBUG nova.compute.manager [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Build of instance 99f9912a-edf0-40f5-a7ce-55767081705b was re-scheduled: A specified parameter was not correct: fileType [ 1062.201661] env[61649]: Faults: ['InvalidArgument'] {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1062.202125] env[61649]: DEBUG nova.compute.manager [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Unplugging VIFs for instance {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1062.202361] env[61649]: DEBUG nova.compute.manager [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Virt driver does not provide unplug_vifs method, so 
it is not possible determine if VIFs should be unplugged. {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1062.202593] env[61649]: DEBUG nova.compute.manager [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1062.202815] env[61649]: DEBUG nova.network.neutron [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1062.464934] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Expecting reply to msg fdf9ae388e9645cb88661381a7c947ea in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1062.476064] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fdf9ae388e9645cb88661381a7c947ea [ 1062.476064] env[61649]: DEBUG nova.network.neutron [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1062.476064] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Expecting reply to msg c3e9c1a91c8b47c1b5cff8eefb9f7412 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1062.488486] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c3e9c1a91c8b47c1b5cff8eefb9f7412 [ 1062.488486] env[61649]: INFO nova.compute.manager [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Took 0.28 seconds to deallocate network for instance. 
[ 1062.488486] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Expecting reply to msg 9ae2f8b8302f4dc683361944fcf43729 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1062.523265] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9ae2f8b8302f4dc683361944fcf43729 [ 1062.525818] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Expecting reply to msg 2ab55d3d332d49489359c43f0bd97744 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1062.556263] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2ab55d3d332d49489359c43f0bd97744 [ 1062.576095] env[61649]: INFO nova.scheduler.client.report [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Deleted allocations for instance 99f9912a-edf0-40f5-a7ce-55767081705b [ 1062.581993] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Expecting reply to msg 05ee306885fd4f90b0615cc79f5688b8 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1062.592114] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 05ee306885fd4f90b0615cc79f5688b8 [ 1062.592691] env[61649]: DEBUG oslo_concurrency.lockutils [None req-a8a3cca8-ec56-4634-99a5-f6d219825bc1 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Lock "99f9912a-edf0-40f5-a7ce-55767081705b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 481.091s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1062.593292] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-dcfe390e-b1d0-4822-85c5-431ae78e5652 tempest-ServerTagsTestJSON-661148968 tempest-ServerTagsTestJSON-661148968-project-member] Expecting reply to msg 23e6c8f5b65a4f81933175104e655046 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1062.594308] env[61649]: DEBUG oslo_concurrency.lockutils [None req-ee196c32-839a-4304-a799-032140bc8a2c tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Lock "99f9912a-edf0-40f5-a7ce-55767081705b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 82.622s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1062.594476] env[61649]: DEBUG oslo_concurrency.lockutils [None req-ee196c32-839a-4304-a799-032140bc8a2c tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Acquiring lock "99f9912a-edf0-40f5-a7ce-55767081705b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1062.594682] env[61649]: DEBUG oslo_concurrency.lockutils [None req-ee196c32-839a-4304-a799-032140bc8a2c tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Lock 
"99f9912a-edf0-40f5-a7ce-55767081705b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1062.594842] env[61649]: DEBUG oslo_concurrency.lockutils [None req-ee196c32-839a-4304-a799-032140bc8a2c tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Lock "99f9912a-edf0-40f5-a7ce-55767081705b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1062.596681] env[61649]: INFO nova.compute.manager [None req-ee196c32-839a-4304-a799-032140bc8a2c tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Terminating instance [ 1062.598749] env[61649]: DEBUG nova.compute.manager [None req-ee196c32-839a-4304-a799-032140bc8a2c tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1062.598910] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-ee196c32-839a-4304-a799-032140bc8a2c tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1062.599414] env[61649]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-56d0ad10-1b5b-49dd-8e14-e3a63ececcf1 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.606381] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 23e6c8f5b65a4f81933175104e655046 [ 1062.609356] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-317d638c-dbd8-42de-9e73-9a70336918fc {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.620873] env[61649]: DEBUG nova.compute.manager [None req-dcfe390e-b1d0-4822-85c5-431ae78e5652 tempest-ServerTagsTestJSON-661148968 tempest-ServerTagsTestJSON-661148968-project-member] [instance: 7e77db18-077d-4665-ad90-c4e5f470716c] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1062.622460] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-dcfe390e-b1d0-4822-85c5-431ae78e5652 tempest-ServerTagsTestJSON-661148968 tempest-ServerTagsTestJSON-661148968-project-member] Expecting reply to msg f02c22c09a0b4dfdbc326eb64e8800aa in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1062.640866] env[61649]: WARNING nova.virt.vmwareapi.vmops [None req-ee196c32-839a-4304-a799-032140bc8a2c tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 99f9912a-edf0-40f5-a7ce-55767081705b could not be found. 
[ 1062.641066] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-ee196c32-839a-4304-a799-032140bc8a2c tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1062.641243] env[61649]: INFO nova.compute.manager [None req-ee196c32-839a-4304-a799-032140bc8a2c tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1062.641484] env[61649]: DEBUG oslo.service.loopingcall [None req-ee196c32-839a-4304-a799-032140bc8a2c tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1062.641747] env[61649]: DEBUG nova.compute.manager [-] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1062.641845] env[61649]: DEBUG nova.network.neutron [-] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1062.644168] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f02c22c09a0b4dfdbc326eb64e8800aa [ 1062.644638] env[61649]: DEBUG nova.compute.manager [None req-dcfe390e-b1d0-4822-85c5-431ae78e5652 tempest-ServerTagsTestJSON-661148968 tempest-ServerTagsTestJSON-661148968-project-member] [instance: 7e77db18-077d-4665-ad90-c4e5f470716c] Instance disappeared before build. 
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1062.644965] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-dcfe390e-b1d0-4822-85c5-431ae78e5652 tempest-ServerTagsTestJSON-661148968 tempest-ServerTagsTestJSON-661148968-project-member] Expecting reply to msg 3ef90b05e4514a1aaf4f86bf3325846c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1062.652981] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3ef90b05e4514a1aaf4f86bf3325846c [ 1062.663391] env[61649]: DEBUG oslo_concurrency.lockutils [None req-dcfe390e-b1d0-4822-85c5-431ae78e5652 tempest-ServerTagsTestJSON-661148968 tempest-ServerTagsTestJSON-661148968-project-member] Lock "7e77db18-077d-4665-ad90-c4e5f470716c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 242.453s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1062.663921] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-f1e0b66c-b8ee-45b4-846e-9c16f13450a7 tempest-VolumesAdminNegativeTest-523549464 tempest-VolumesAdminNegativeTest-523549464-project-member] Expecting reply to msg 37824c19def94a9384fa81b79a5c79b7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1062.665814] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 364b78bbfe3544f19980a00c842577ba in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1062.672070] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 364b78bbfe3544f19980a00c842577ba [ 1062.672493] env[61649]: DEBUG nova.network.neutron [-] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1062.672853] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 70d4308d70184e489f10ababf9bab293 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1062.674134] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 37824c19def94a9384fa81b79a5c79b7 [ 1062.674527] env[61649]: DEBUG nova.compute.manager [None req-f1e0b66c-b8ee-45b4-846e-9c16f13450a7 tempest-VolumesAdminNegativeTest-523549464 tempest-VolumesAdminNegativeTest-523549464-project-member] [instance: 6efdc98b-e32d-4313-b13f-95c3d4911823] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1062.677222] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-f1e0b66c-b8ee-45b4-846e-9c16f13450a7 tempest-VolumesAdminNegativeTest-523549464 tempest-VolumesAdminNegativeTest-523549464-project-member] Expecting reply to msg d600d833d31c49d1b066e42f5167a0e8 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1062.679600] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 70d4308d70184e489f10ababf9bab293 [ 1062.680048] env[61649]: INFO nova.compute.manager [-] [instance: 99f9912a-edf0-40f5-a7ce-55767081705b] Took 0.04 seconds to deallocate network for instance. 
[ 1062.683598] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-ee196c32-839a-4304-a799-032140bc8a2c tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Expecting reply to msg 79e0d51d5dce42aeb755df667233d258 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1062.702781] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d600d833d31c49d1b066e42f5167a0e8 [ 1062.703307] env[61649]: DEBUG nova.compute.manager [None req-f1e0b66c-b8ee-45b4-846e-9c16f13450a7 tempest-VolumesAdminNegativeTest-523549464 tempest-VolumesAdminNegativeTest-523549464-project-member] [instance: 6efdc98b-e32d-4313-b13f-95c3d4911823] Instance disappeared before build. {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1062.703627] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-f1e0b66c-b8ee-45b4-846e-9c16f13450a7 tempest-VolumesAdminNegativeTest-523549464 tempest-VolumesAdminNegativeTest-523549464-project-member] Expecting reply to msg 8fcdd501e5fe47f3971308731da0e61a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1062.709898] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 79e0d51d5dce42aeb755df667233d258 [ 1062.712809] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8fcdd501e5fe47f3971308731da0e61a [ 1062.723682] env[61649]: DEBUG oslo_concurrency.lockutils [None req-f1e0b66c-b8ee-45b4-846e-9c16f13450a7 tempest-VolumesAdminNegativeTest-523549464 tempest-VolumesAdminNegativeTest-523549464-project-member] Lock "6efdc98b-e32d-4313-b13f-95c3d4911823" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 238.297s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1062.724255] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d666fcda-ed1c-4a20-a30b-afa8e2049bb9 tempest-ServerActionsTestOtherB-464316570 tempest-ServerActionsTestOtherB-464316570-project-member] Expecting reply to msg f53522dc730b43e9be748034879b6466 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1062.726260] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-ee196c32-839a-4304-a799-032140bc8a2c tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Expecting reply to msg 5cbdc1478ce54afcad8087338cdc7d83 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1062.732900] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f53522dc730b43e9be748034879b6466 [ 1062.733315] env[61649]: DEBUG nova.compute.manager [None req-d666fcda-ed1c-4a20-a30b-afa8e2049bb9 tempest-ServerActionsTestOtherB-464316570 tempest-ServerActionsTestOtherB-464316570-project-member] [instance: f51cfd74-25e5-4077-9b43-8cb38fe051f8] Starting instance... 
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1062.735021] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d666fcda-ed1c-4a20-a30b-afa8e2049bb9 tempest-ServerActionsTestOtherB-464316570 tempest-ServerActionsTestOtherB-464316570-project-member] Expecting reply to msg 7d9c45815f5d488e8e3169adcda011b4 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1062.757642] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7d9c45815f5d488e8e3169adcda011b4 [ 1062.758310] env[61649]: DEBUG nova.compute.manager [None req-d666fcda-ed1c-4a20-a30b-afa8e2049bb9 tempest-ServerActionsTestOtherB-464316570 tempest-ServerActionsTestOtherB-464316570-project-member] [instance: f51cfd74-25e5-4077-9b43-8cb38fe051f8] Instance disappeared before build. {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1062.758707] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d666fcda-ed1c-4a20-a30b-afa8e2049bb9 tempest-ServerActionsTestOtherB-464316570 tempest-ServerActionsTestOtherB-464316570-project-member] Expecting reply to msg 52f9a4ccba49479a9491cf1bf41a805f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1062.759752] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5cbdc1478ce54afcad8087338cdc7d83 [ 1062.762389] env[61649]: DEBUG oslo_concurrency.lockutils [None req-ee196c32-839a-4304-a799-032140bc8a2c tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Lock "99f9912a-edf0-40f5-a7ce-55767081705b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.168s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1062.762626] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-ee196c32-839a-4304-a799-032140bc8a2c tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Expecting reply to msg 763beafcd8a54a3fba6305bed853b33b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1062.767762] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 52f9a4ccba49479a9491cf1bf41a805f [ 1062.774145] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 763beafcd8a54a3fba6305bed853b33b [ 1062.778422] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d666fcda-ed1c-4a20-a30b-afa8e2049bb9 tempest-ServerActionsTestOtherB-464316570 tempest-ServerActionsTestOtherB-464316570-project-member] Lock "f51cfd74-25e5-4077-9b43-8cb38fe051f8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 237.144s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1062.779129] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7c3715d9-3afd-4e7e-8ab8-c8af166fd2f4 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 13f9c7db611c42ccb1d656aa2ba2c72f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1062.787787] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 13f9c7db611c42ccb1d656aa2ba2c72f [ 1062.788192] env[61649]: DEBUG nova.compute.manager [None req-7c3715d9-3afd-4e7e-8ab8-c8af166fd2f4 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] 
[instance: db7db1c9-6716-4591-b669-b85dd595a3e9] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1062.790426] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7c3715d9-3afd-4e7e-8ab8-c8af166fd2f4 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 687fbd1a57954095bd55804ade5b6b00 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1062.810628] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 687fbd1a57954095bd55804ade5b6b00 [ 1062.811029] env[61649]: DEBUG nova.compute.manager [None req-7c3715d9-3afd-4e7e-8ab8-c8af166fd2f4 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: db7db1c9-6716-4591-b669-b85dd595a3e9] Instance disappeared before build. {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1062.811364] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7c3715d9-3afd-4e7e-8ab8-c8af166fd2f4 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg a7e1ed20316c4af5b2c3b3fe754e133c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1062.819719] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a7e1ed20316c4af5b2c3b3fe754e133c [ 1062.830156] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7c3715d9-3afd-4e7e-8ab8-c8af166fd2f4 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Lock "db7db1c9-6716-4591-b669-b85dd595a3e9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 230.060s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1062.830689] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-29301568-9498-49da-a649-6f06e5d919ae tempest-ImagesOneServerNegativeTestJSON-2134550288 tempest-ImagesOneServerNegativeTestJSON-2134550288-project-member] Expecting reply to msg 998f7cdef3ff4ff292050a6da407d0da in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1062.840218] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 998f7cdef3ff4ff292050a6da407d0da [ 1062.840706] env[61649]: DEBUG nova.compute.manager [None req-29301568-9498-49da-a649-6f06e5d919ae tempest-ImagesOneServerNegativeTestJSON-2134550288 tempest-ImagesOneServerNegativeTestJSON-2134550288-project-member] [instance: 899db5f5-4963-4f7a-97d2-9c2dfd7a6981] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1062.842349] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-29301568-9498-49da-a649-6f06e5d919ae tempest-ImagesOneServerNegativeTestJSON-2134550288 tempest-ImagesOneServerNegativeTestJSON-2134550288-project-member] Expecting reply to msg 913f482efa0c4c7cbe53f86461782f38 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1062.863466] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 913f482efa0c4c7cbe53f86461782f38 [ 1062.864017] env[61649]: DEBUG nova.compute.manager [None req-29301568-9498-49da-a649-6f06e5d919ae tempest-ImagesOneServerNegativeTestJSON-2134550288 tempest-ImagesOneServerNegativeTestJSON-2134550288-project-member] [instance: 899db5f5-4963-4f7a-97d2-9c2dfd7a6981] Instance disappeared before build. 
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1062.864294] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-29301568-9498-49da-a649-6f06e5d919ae tempest-ImagesOneServerNegativeTestJSON-2134550288 tempest-ImagesOneServerNegativeTestJSON-2134550288-project-member] Expecting reply to msg f1b010ba930347c0b619dbbe4b6bb554 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1062.874347] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f1b010ba930347c0b619dbbe4b6bb554 [ 1062.885580] env[61649]: DEBUG oslo_concurrency.lockutils [None req-29301568-9498-49da-a649-6f06e5d919ae tempest-ImagesOneServerNegativeTestJSON-2134550288 tempest-ImagesOneServerNegativeTestJSON-2134550288-project-member] Lock "899db5f5-4963-4f7a-97d2-9c2dfd7a6981" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 223.051s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1062.886091] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7325177b-3f12-4789-98e6-dfb31afaf4a2 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg f51d7908d3bf4c97be8a7e4528291c1e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1062.894926] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f51d7908d3bf4c97be8a7e4528291c1e [ 1062.894926] env[61649]: DEBUG nova.compute.manager [None req-7325177b-3f12-4789-98e6-dfb31afaf4a2 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 7de3e5cd-94ee-4a80-8baf-17ccfed9d8c8] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1062.895995] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7325177b-3f12-4789-98e6-dfb31afaf4a2 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg 34a7c237daf74a4294b879fae9a78aa6 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1062.916815] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 34a7c237daf74a4294b879fae9a78aa6 [ 1062.917354] env[61649]: DEBUG nova.compute.manager [None req-7325177b-3f12-4789-98e6-dfb31afaf4a2 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 7de3e5cd-94ee-4a80-8baf-17ccfed9d8c8] Instance disappeared before build. 
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1062.917680] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7325177b-3f12-4789-98e6-dfb31afaf4a2 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg da4d60a39e15445591dcf7b3108d7223 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1062.927086] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg da4d60a39e15445591dcf7b3108d7223 [ 1062.938947] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7325177b-3f12-4789-98e6-dfb31afaf4a2 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Lock "7de3e5cd-94ee-4a80-8baf-17ccfed9d8c8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 219.550s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1062.939525] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Expecting reply to msg 01eee2c70caf4a7f879e3f45a742c4e4 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1062.947256] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 01eee2c70caf4a7f879e3f45a742c4e4 [ 1062.947618] env[61649]: DEBUG nova.compute.manager [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1062.949278] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Expecting reply to msg 740ab4d630d14ec58867747ae4fa6f34 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1062.981861] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 740ab4d630d14ec58867747ae4fa6f34 [ 1062.998996] env[61649]: DEBUG oslo_concurrency.lockutils [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1062.999296] env[61649]: DEBUG oslo_concurrency.lockutils [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1063.000811] env[61649]: INFO nova.compute.claims [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1063.002340] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 
tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Expecting reply to msg 8de2b5ff66d7418aadd7bf7e78fa6f6e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1063.033119] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8de2b5ff66d7418aadd7bf7e78fa6f6e [ 1063.034744] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Expecting reply to msg db5f44d484bb4a209383bb359200f7e1 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1063.043056] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg db5f44d484bb4a209383bb359200f7e1 [ 1063.296099] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-050b1585-88c3-4d79-9efa-f191ad09c72b {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.303729] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bef986b-15d4-47d0-8c66-e51c00ace3cf {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.335547] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc08b779-feb8-405a-bdbd-63f278958029 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.342496] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35a09774-beda-4360-8fed-10147db31233 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.355024] env[61649]: DEBUG nova.compute.provider_tree [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1063.355482] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Expecting reply to msg 0980509e31f240f3b07b53080d169c4b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1063.362989] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0980509e31f240f3b07b53080d169c4b [ 1063.363898] env[61649]: DEBUG nova.scheduler.client.report [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1063.366147] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None 
req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Expecting reply to msg 72d532d7556249fc9d0e781218e48b3f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1063.376354] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 72d532d7556249fc9d0e781218e48b3f [ 1063.377030] env[61649]: DEBUG oslo_concurrency.lockutils [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.378s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1063.377615] env[61649]: DEBUG nova.compute.manager [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Start building networks asynchronously for instance. {{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1063.379262] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Expecting reply to msg caf1c877fc784a08b4a98bb784e2a76c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1063.412954] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg caf1c877fc784a08b4a98bb784e2a76c [ 1063.414196] env[61649]: DEBUG nova.compute.utils [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Using /dev/sd instead of None {{(pid=61649) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1063.414773] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Expecting reply to msg e7b587c724af49909ccaf9a9da27b13a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1063.415525] env[61649]: DEBUG nova.compute.manager [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Allocating IP information in the background. {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1063.415685] env[61649]: DEBUG nova.network.neutron [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] allocate_for_instance() {{(pid=61649) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1063.425433] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e7b587c724af49909ccaf9a9da27b13a [ 1063.425960] env[61649]: DEBUG nova.compute.manager [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Start building block device mappings for instance. 
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1063.427546] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Expecting reply to msg 5457025b4b3d4c2381bfe54f2a68ea9c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1063.454377] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5457025b4b3d4c2381bfe54f2a68ea9c [ 1063.457022] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Expecting reply to msg 78f4e142b6ad4031b5c52e4f1e327779 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1063.500992] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 78f4e142b6ad4031b5c52e4f1e327779 [ 1063.502177] env[61649]: DEBUG nova.compute.manager [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Start spawning the instance on the hypervisor. {{(pid=61649) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1063.525265] env[61649]: DEBUG nova.virt.hardware [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-04-02T10:03:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-04-02T10:03:42Z,direct_url=,disk_format='vmdk',id=d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d9c1bd4c77004c3cb8e42232cad1896c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-04-02T10:03:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1063.527253] env[61649]: DEBUG nova.virt.hardware [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Flavor limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1063.527447] env[61649]: DEBUG nova.virt.hardware [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Image limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1063.527646] env[61649]: DEBUG nova.virt.hardware [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Flavor pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1063.527787] env[61649]: DEBUG nova.virt.hardware [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 
tempest-ImagesNegativeTestJSON-2078991854-project-member] Image pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1063.527928] env[61649]: DEBUG nova.virt.hardware [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1063.528148] env[61649]: DEBUG nova.virt.hardware [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1063.528306] env[61649]: DEBUG nova.virt.hardware [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1063.528466] env[61649]: DEBUG nova.virt.hardware [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Got 1 possible topologies {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1063.528623] env[61649]: DEBUG nova.virt.hardware [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1063.528787] env[61649]: DEBUG nova.virt.hardware [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1063.529701] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dff774d2-cd46-4c34-af3d-962caa1d6a12 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.539013] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f34b17a-eb8d-443d-8945-eadb573dfb19 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.679183] env[61649]: DEBUG nova.policy [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8a93f743be79453383fa2022be09f54b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f540bccb70cd472a870ea2f8d9df8fda', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61649) 
authorize /opt/stack/nova/nova/policy.py:203}} [ 1063.975677] env[61649]: DEBUG nova.network.neutron [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Successfully created port: 3bb5adcc-6cc3-419a-aad4-69ad5f728f49 {{(pid=61649) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1064.532166] env[61649]: DEBUG nova.compute.manager [req-a2d8cc59-c91f-4d27-bd91-8d575729e4ae req-e28addae-5020-4fab-9ff6-b1d78ed67711 service nova] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Received event network-vif-plugged-3bb5adcc-6cc3-419a-aad4-69ad5f728f49 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1064.532358] env[61649]: DEBUG oslo_concurrency.lockutils [req-a2d8cc59-c91f-4d27-bd91-8d575729e4ae req-e28addae-5020-4fab-9ff6-b1d78ed67711 service nova] Acquiring lock "9cdd96c2-2837-4cb3-855c-ecad727dd5d4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1064.532586] env[61649]: DEBUG oslo_concurrency.lockutils [req-a2d8cc59-c91f-4d27-bd91-8d575729e4ae req-e28addae-5020-4fab-9ff6-b1d78ed67711 service nova] Lock "9cdd96c2-2837-4cb3-855c-ecad727dd5d4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1064.532749] env[61649]: DEBUG oslo_concurrency.lockutils [req-a2d8cc59-c91f-4d27-bd91-8d575729e4ae req-e28addae-5020-4fab-9ff6-b1d78ed67711 service nova] Lock "9cdd96c2-2837-4cb3-855c-ecad727dd5d4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1064.532914] env[61649]: DEBUG nova.compute.manager [req-a2d8cc59-c91f-4d27-bd91-8d575729e4ae req-e28addae-5020-4fab-9ff6-b1d78ed67711 service nova] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] No waiting events found dispatching network-vif-plugged-3bb5adcc-6cc3-419a-aad4-69ad5f728f49 {{(pid=61649) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1064.533070] env[61649]: WARNING nova.compute.manager [req-a2d8cc59-c91f-4d27-bd91-8d575729e4ae req-e28addae-5020-4fab-9ff6-b1d78ed67711 service nova] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Received unexpected event network-vif-plugged-3bb5adcc-6cc3-419a-aad4-69ad5f728f49 for instance with vm_state building and task_state spawning. 
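The entries above trace Nova's external-event plumbing end to end: Neutron reports network-vif-plugged-3bb5adcc-6cc3-419a-aad4-69ad5f728f49 while the instance is still building, pop_instance_event takes the per-instance "-events" lock, finds no registered waiter, and the manager logs the "Received unexpected event" warning. A minimal sketch of that lock-then-pop pattern follows; it uses the real oslo_concurrency.lockutils API, but the _events store and the pop_instance_event helper are illustrative stand-ins, not Nova's actual implementation.

    from oslo_concurrency import lockutils

    INSTANCE_UUID = "9cdd96c2-2837-4cb3-855c-ecad727dd5d4"  # instance from the log
    _events = {}  # {instance_uuid: {event_name: waiter}} -- illustrative store

    def pop_instance_event(event_name):
        # Mirrors the Acquiring/acquired/released sequence logged above: the
        # per-instance event list is only touched while holding the
        # "<uuid>-events" lock.
        with lockutils.lock(INSTANCE_UUID + "-events"):
            waiters = _events.get(INSTANCE_UUID, {})
            return waiters.pop(event_name, None)

    # Returns None here, which is what produces "No waiting events found
    # dispatching ..." followed by the WARNING about an unexpected
    # network-vif-plugged event while vm_state is building.
    pop_instance_event("network-vif-plugged-3bb5adcc-6cc3-419a-aad4-69ad5f728f49")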
[ 1064.545577] env[61649]: DEBUG nova.network.neutron [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Successfully updated port: 3bb5adcc-6cc3-419a-aad4-69ad5f728f49 {{(pid=61649) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1064.546059] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Expecting reply to msg f343bae9b3884e499c70ecb909658fc7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1064.553134] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f343bae9b3884e499c70ecb909658fc7 [ 1064.553751] env[61649]: DEBUG oslo_concurrency.lockutils [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Acquiring lock "refresh_cache-9cdd96c2-2837-4cb3-855c-ecad727dd5d4" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1064.553889] env[61649]: DEBUG oslo_concurrency.lockutils [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Acquired lock "refresh_cache-9cdd96c2-2837-4cb3-855c-ecad727dd5d4" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1064.554081] env[61649]: DEBUG nova.network.neutron [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Building network info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1064.554945] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Expecting reply to msg aa7e079b7a1242a887c9dfaf2780c10e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1064.567450] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aa7e079b7a1242a887c9dfaf2780c10e [ 1064.595351] env[61649]: DEBUG nova.network.neutron [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Instance cache missing network info. 
{{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1064.826543] env[61649]: DEBUG nova.network.neutron [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Updating instance_info_cache with network_info: [{"id": "3bb5adcc-6cc3-419a-aad4-69ad5f728f49", "address": "fa:16:3e:51:4a:32", "network": {"id": "59d18de9-3c85-4f3b-b89d-41bc5fded0e4", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-2005770611-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f540bccb70cd472a870ea2f8d9df8fda", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d12aff80-9d1b-4a67-a470-9c0148b443e3", "external-id": "nsx-vlan-transportzone-784", "segmentation_id": 784, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3bb5adcc-6c", "ovs_interfaceid": "3bb5adcc-6cc3-419a-aad4-69ad5f728f49", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1064.827077] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Expecting reply to msg 8e0b8f2da4654e04804a8c346d33d659 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1064.840846] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8e0b8f2da4654e04804a8c346d33d659 [ 1064.841472] env[61649]: DEBUG oslo_concurrency.lockutils [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Releasing lock "refresh_cache-9cdd96c2-2837-4cb3-855c-ecad727dd5d4" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1064.841753] env[61649]: DEBUG nova.compute.manager [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Instance network_info: |[{"id": "3bb5adcc-6cc3-419a-aad4-69ad5f728f49", "address": "fa:16:3e:51:4a:32", "network": {"id": "59d18de9-3c85-4f3b-b89d-41bc5fded0e4", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-2005770611-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f540bccb70cd472a870ea2f8d9df8fda", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "d12aff80-9d1b-4a67-a470-9c0148b443e3", "external-id": "nsx-vlan-transportzone-784", "segmentation_id": 784, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3bb5adcc-6c", "ovs_interfaceid": "3bb5adcc-6cc3-419a-aad4-69ad5f728f49", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1064.842145] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:51:4a:32', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd12aff80-9d1b-4a67-a470-9c0148b443e3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3bb5adcc-6cc3-419a-aad4-69ad5f728f49', 'vif_model': 'vmxnet3'}] {{(pid=61649) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1064.849933] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Creating folder: Project (f540bccb70cd472a870ea2f8d9df8fda). Parent ref: group-v51588. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1064.850571] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c6265eb7-1584-4b8f-86f5-b9f22dc5d457 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.861588] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Created folder: Project (f540bccb70cd472a870ea2f8d9df8fda) in parent group-v51588. [ 1064.861776] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Creating folder: Instances. Parent ref: group-v51652. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1064.861997] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-14abfaa5-3541-42a8-a9ec-44208d73397e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.870566] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Created folder: Instances in parent group-v51652. [ 1064.870780] env[61649]: DEBUG oslo.service.loopingcall [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1064.870954] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Creating VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1064.871139] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ba1e8b23-5090-479c-b777-ed0406110dcf {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.890170] env[61649]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1064.890170] env[61649]: value = "task-158191" [ 1064.890170] env[61649]: _type = "Task" [ 1064.890170] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.897924] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158191, 'name': CreateVM_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.400080] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158191, 'name': CreateVM_Task, 'duration_secs': 0.263881} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.400613] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Created VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1065.401377] env[61649]: DEBUG oslo_concurrency.lockutils [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1065.401657] env[61649]: DEBUG oslo_concurrency.lockutils [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1065.402160] env[61649]: DEBUG oslo_concurrency.lockutils [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1065.402495] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6dc8983a-e5c7-4ef8-92b1-ad8c5c4b1b5b {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.406973] env[61649]: DEBUG oslo_vmware.api [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Waiting for the task: (returnval){ [ 1065.406973] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52530efe-ca39-def3-8314-7df7e55b4a0d" [ 1065.406973] env[61649]: _type = "Task" [ 1065.406973] env[61649]: } to complete. 
{{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.414714] env[61649]: DEBUG oslo_vmware.api [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52530efe-ca39-def3-8314-7df7e55b4a0d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.919950] env[61649]: DEBUG oslo_concurrency.lockutils [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1065.920341] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Processing image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1065.920649] env[61649]: DEBUG oslo_concurrency.lockutils [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1066.012986] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-34060b93-1cea-441a-855e-cea394fedd21 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Expecting reply to msg 6085c75245184ccfa1ddca0e12eef291 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1066.022209] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6085c75245184ccfa1ddca0e12eef291 [ 1066.022676] env[61649]: DEBUG oslo_concurrency.lockutils [None req-34060b93-1cea-441a-855e-cea394fedd21 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Acquiring lock "9cdd96c2-2837-4cb3-855c-ecad727dd5d4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1066.596437] env[61649]: DEBUG nova.compute.manager [req-8ab6a70b-412e-4dcd-a56a-4ec19790ad12 req-ee43bb08-0cc2-4a68-bbd4-c493fc33621c service nova] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Received event network-changed-3bb5adcc-6cc3-419a-aad4-69ad5f728f49 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1066.596700] env[61649]: DEBUG nova.compute.manager [req-8ab6a70b-412e-4dcd-a56a-4ec19790ad12 req-ee43bb08-0cc2-4a68-bbd4-c493fc33621c service nova] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Refreshing instance network info cache due to event network-changed-3bb5adcc-6cc3-419a-aad4-69ad5f728f49. 
{{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 1066.596815] env[61649]: DEBUG oslo_concurrency.lockutils [req-8ab6a70b-412e-4dcd-a56a-4ec19790ad12 req-ee43bb08-0cc2-4a68-bbd4-c493fc33621c service nova] Acquiring lock "refresh_cache-9cdd96c2-2837-4cb3-855c-ecad727dd5d4" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1066.596961] env[61649]: DEBUG oslo_concurrency.lockutils [req-8ab6a70b-412e-4dcd-a56a-4ec19790ad12 req-ee43bb08-0cc2-4a68-bbd4-c493fc33621c service nova] Acquired lock "refresh_cache-9cdd96c2-2837-4cb3-855c-ecad727dd5d4" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1066.597134] env[61649]: DEBUG nova.network.neutron [req-8ab6a70b-412e-4dcd-a56a-4ec19790ad12 req-ee43bb08-0cc2-4a68-bbd4-c493fc33621c service nova] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Refreshing network info cache for port 3bb5adcc-6cc3-419a-aad4-69ad5f728f49 {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 1066.597619] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-8ab6a70b-412e-4dcd-a56a-4ec19790ad12 req-ee43bb08-0cc2-4a68-bbd4-c493fc33621c service nova] Expecting reply to msg b8f9c999fb274c61ba92d167ae96a0fa in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1066.604518] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b8f9c999fb274c61ba92d167ae96a0fa [ 1066.883490] env[61649]: DEBUG nova.network.neutron [req-8ab6a70b-412e-4dcd-a56a-4ec19790ad12 req-ee43bb08-0cc2-4a68-bbd4-c493fc33621c service nova] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Updated VIF entry in instance network info cache for port 3bb5adcc-6cc3-419a-aad4-69ad5f728f49. 
{{(pid=61649) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 1066.884065] env[61649]: DEBUG nova.network.neutron [req-8ab6a70b-412e-4dcd-a56a-4ec19790ad12 req-ee43bb08-0cc2-4a68-bbd4-c493fc33621c service nova] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Updating instance_info_cache with network_info: [{"id": "3bb5adcc-6cc3-419a-aad4-69ad5f728f49", "address": "fa:16:3e:51:4a:32", "network": {"id": "59d18de9-3c85-4f3b-b89d-41bc5fded0e4", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-2005770611-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f540bccb70cd472a870ea2f8d9df8fda", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d12aff80-9d1b-4a67-a470-9c0148b443e3", "external-id": "nsx-vlan-transportzone-784", "segmentation_id": 784, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3bb5adcc-6c", "ovs_interfaceid": "3bb5adcc-6cc3-419a-aad4-69ad5f728f49", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1066.884847] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-8ab6a70b-412e-4dcd-a56a-4ec19790ad12 req-ee43bb08-0cc2-4a68-bbd4-c493fc33621c service nova] Expecting reply to msg a959502857104f2e984ee71ac97cd88d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1066.897444] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a959502857104f2e984ee71ac97cd88d [ 1066.898051] env[61649]: DEBUG oslo_concurrency.lockutils [req-8ab6a70b-412e-4dcd-a56a-4ec19790ad12 req-ee43bb08-0cc2-4a68-bbd4-c493fc33621c service nova] Releasing lock "refresh_cache-9cdd96c2-2837-4cb3-855c-ecad727dd5d4" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1066.928671] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1067.929527] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1069.924738] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1069.928552] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1070.929391] 
env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1070.929722] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Starting heal instance info cache {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 1070.929722] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Rebuilding the list of instances to heal {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 1070.930270] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 47dbe22ece6b4d02ae5c83a7a0f8aa40 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1070.947373] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 47dbe22ece6b4d02ae5c83a7a0f8aa40 [ 1070.949670] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1070.949822] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1070.949955] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1070.950081] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1070.950216] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1070.950374] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1070.950496] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1070.950617] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Skipping network cache update for instance because it is Building. 
{{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1070.950735] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1070.950853] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1070.950969] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Didn't find any instances for network info cache update. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 1070.951428] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1070.951609] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1070.951765] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1070.951893] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61649) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 1070.952054] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1070.952350] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 862cdca360d249f78f647be97d721a46 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1070.961315] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 862cdca360d249f78f647be97d721a46 [ 1070.961315] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1070.961458] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1070.961541] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1070.961689] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61649) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1070.962698] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf074ebb-6423-4bb6-9ae1-f21d8b14985a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.971364] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e2a17d1-219c-4e87-92e5-9efc7dfc90d5 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.984919] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-004b8116-1654-45a4-b7b1-2c58a2ca38b6 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.991182] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16696e00-e202-49d9-a6ed-6b5f3a065971 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.021015] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181774MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61649) _report_hypervisor_resource_view 
/opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1071.021147] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1071.021605] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1071.022077] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 02ed301770e541d7b5db62270695127a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1071.057030] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 02ed301770e541d7b5db62270695127a [ 1071.061041] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg b27c9a4bf3754b60a6ad31ffecd1c10f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1071.070338] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b27c9a4bf3754b60a6ad31ffecd1c10f [ 1071.090769] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 7f9f2074-6822-4d9d-9791-4bebc7e55862 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1071.090917] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance bf8c692f-6510-4548-aedd-0e1792512e20 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1071.091044] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance e5fe92cf-e150-419f-a164-a98a9d24dd8c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1071.091164] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 95426048-d403-4dad-9ad7-b76de655a319 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1071.091280] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance a0db1e96-4ca4-4fed-b86b-d8457f3570a9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1071.091395] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 29f84900-0805-4ab2-af4d-bd7be2ac94d3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1071.091510] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance bf2399eb-b2df-43b3-bddd-48692825c40a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1071.091625] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 6ab197e9-3e38-4b37-b625-c30b6977261a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1071.091736] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance c9fe1bfe-e813-43e9-9668-b813416ee27b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1071.091847] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 9cdd96c2-2837-4cb3-855c-ecad727dd5d4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1071.092380] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 007266a4fd2444608ab90698e42868e3 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1071.101795] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 007266a4fd2444608ab90698e42868e3 [ 1071.102450] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 3e1127c5-876f-47b9-b652-7a558711a1a7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1071.102894] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 3a2dc5d5f9bf4d87a45746c7c0a74502 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1071.111710] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3a2dc5d5f9bf4d87a45746c7c0a74502 [ 1071.112300] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 5dc4bde6-db61-47c2-a2b8-d2a5515b1525 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1071.112710] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 63279febced34fa2a8225cb779565625 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1071.121129] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 63279febced34fa2a8225cb779565625 [ 1071.121854] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 5730229a-fd0c-4df1-9059-cd6ed39e954c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1071.122267] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 754d681c8f704710b64377d758038fb7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1071.131009] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 754d681c8f704710b64377d758038fb7 [ 1071.131583] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 0534f500-d8d8-4aad-896c-c965778c3a6f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1071.131989] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg f7715dc2f8db4f1a9b70b5fb945a75a8 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1071.140527] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f7715dc2f8db4f1a9b70b5fb945a75a8 [ 1071.141259] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 6d3ee887-6b6b-4199-aea6-f0de0153e5c5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1071.141725] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 187130132d024f338ce22b9630fbf30d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1071.150176] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 187130132d024f338ce22b9630fbf30d [ 1071.150799] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 82ae439e-5fe0-4bed-b550-e34929c139f8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1071.151145] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg ea31c6a9a0b1403191113f7a1507fb63 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1071.159906] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ea31c6a9a0b1403191113f7a1507fb63 [ 1071.160792] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 40b216ba-3afd-4cfe-b98b-c5de03501317 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1071.161358] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 5fb2a3a6d1cc4068903b5c0880137ff0 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1071.169972] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5fb2a3a6d1cc4068903b5c0880137ff0 [ 1071.170741] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance a12bff02-f7da-43a1-b614-beb3d6908e0b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1071.171278] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg e6ae1ceb9bf4433c8d60b0d045191259 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1071.179769] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e6ae1ceb9bf4433c8d60b0d045191259 [ 1071.180550] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 2b4cc40e-a6fc-48df-baaf-f74352c24408 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1071.181099] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 98d039fb34094063bdf4e510d06a90d2 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1071.190588] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 98d039fb34094063bdf4e510d06a90d2 [ 1071.191279] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 0b0050ff-2714-4068-9956-089c6aa3eff1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1071.191824] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg a392b663c53d47ec89821fc51f0b0807 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1071.200222] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a392b663c53d47ec89821fc51f0b0807 [ 1071.200968] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 71c15bd8-5786-4d44-aa0e-3249b272ac72 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1071.201539] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 1699a8937be04db5b27362d2105b8e97 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1071.226969] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1699a8937be04db5b27362d2105b8e97 [ 1071.227884] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance d7adbc9f-af82-4f99-8536-4411665e3233 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1071.229519] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 26a44c2a70d94f19b772ce74a6666277 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1071.239585] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 26a44c2a70d94f19b772ce74a6666277 [ 1071.240430] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance c7a35269-b314-4381-a8b4-d509d5627861 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1071.240810] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1071.241156] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1071.494248] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3742093-0b6c-4e37-abb7-7f974716a31a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.501528] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84a996b4-dbf9-48c4-9f35-8493aa4f8693 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.534508] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aec0fe7-1722-4094-b686-495c42e0835e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.542526] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f11c02d8-5886-4b76-ad92-41f48ebff59d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.555663] env[61649]: DEBUG nova.compute.provider_tree [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1071.556294] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg e5214f41aa184ed09806ccf4554a2305 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1071.563502] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e5214f41aa184ed09806ccf4554a2305 [ 1071.564491] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1071.566925] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 9aae4a39d24943f48c22030487d9b72d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1071.594616] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9aae4a39d24943f48c22030487d9b72d 
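The update_available_resource pass above finishes with the report client comparing the node's inventory against placement and finding it unchanged. That payload can be reconstructed from the audited totals (48 vCPUs, 196590 MB RAM, 400 GB disk); the sketch below rebuilds the same dict shape, with the reserved values, max_unit caps, and allocation ratios copied from the log entry. build_inventory is an illustrative helper, not a Nova or placement API.

    def build_inventory(total_vcpus, total_ram_mb, total_disk_gb):
        # Shape and constants match the "Inventory has not changed for
        # provider dad32f24-..." entry above.
        return {
            "VCPU": {"total": total_vcpus, "reserved": 0, "min_unit": 1,
                     "max_unit": 16, "step_size": 1, "allocation_ratio": 4.0},
            "MEMORY_MB": {"total": total_ram_mb, "reserved": 512, "min_unit": 1,
                          "max_unit": 65530, "step_size": 1,
                          "allocation_ratio": 1.0},
            "DISK_GB": {"total": total_disk_gb, "reserved": 0, "min_unit": 1,
                        "max_unit": 197, "step_size": 1,
                        "allocation_ratio": 1.0},
        }

    inv = build_inventory(48, 196590, 400)
    # With allocation_ratio 4.0, placement offers 48 * 4.0 = 192 schedulable
    # VCPU units against the 48 physical cores in the hypervisor view logged
    # earlier, of which 10 are currently allocated.
    assert inv["VCPU"]["total"] * inv["VCPU"]["allocation_ratio"] == 192.0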
[ 1071.595045] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61649) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1071.595309] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.574s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1090.461646] env[61649]: DEBUG oslo_concurrency.lockutils [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Acquiring lock "0fb0aaae-b6d2-418d-81a9-74671f4b97c6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1090.461937] env[61649]: DEBUG oslo_concurrency.lockutils [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Lock "0fb0aaae-b6d2-418d-81a9-74671f4b97c6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1110.794626] env[61649]: WARNING oslo_vmware.rw_handles [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1110.794626] env[61649]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1110.794626] env[61649]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1110.794626] env[61649]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1110.794626] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1110.794626] env[61649]: ERROR oslo_vmware.rw_handles response.begin() [ 1110.794626] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1110.794626] env[61649]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1110.794626] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1110.794626] env[61649]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1110.794626] env[61649]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1110.794626] env[61649]: ERROR oslo_vmware.rw_handles [ 1110.794626] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Downloaded image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to vmware_temp/1e47f526-5d6c-4f99-bd7b-cdef8642e6cb/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store 
datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1110.796914] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Caching image {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1110.797221] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Copying Virtual Disk [datastore1] vmware_temp/1e47f526-5d6c-4f99-bd7b-cdef8642e6cb/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk to [datastore1] vmware_temp/1e47f526-5d6c-4f99-bd7b-cdef8642e6cb/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk {{(pid=61649) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1110.797554] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5188b51a-89b0-41a6-9563-544a3ac83012 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.805080] env[61649]: DEBUG oslo_vmware.api [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Waiting for the task: (returnval){ [ 1110.805080] env[61649]: value = "task-158192" [ 1110.805080] env[61649]: _type = "Task" [ 1110.805080] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.812773] env[61649]: DEBUG oslo_vmware.api [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Task: {'id': task-158192, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.315128] env[61649]: DEBUG oslo_vmware.exceptions [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Fault InvalidArgument not matched. 
{{(pid=61649) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1111.315415] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1111.315976] env[61649]: ERROR nova.compute.manager [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1111.315976] env[61649]: Faults: ['InvalidArgument'] [ 1111.315976] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Traceback (most recent call last): [ 1111.315976] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1111.315976] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] yield resources [ 1111.315976] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1111.315976] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] self.driver.spawn(context, instance, image_meta, [ 1111.315976] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1111.315976] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1111.315976] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1111.315976] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] self._fetch_image_if_missing(context, vi) [ 1111.315976] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1111.316369] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] image_cache(vi, tmp_image_ds_loc) [ 1111.316369] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1111.316369] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] vm_util.copy_virtual_disk( [ 1111.316369] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1111.316369] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] session._wait_for_task(vmdk_copy_task) [ 1111.316369] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 1111.316369] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] return self.wait_for_task(task_ref) [ 1111.316369] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1111.316369] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] return evt.wait() [ 1111.316369] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1111.316369] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] result = hub.switch() [ 1111.316369] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1111.316369] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] return self.greenlet.switch() [ 1111.316753] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1111.316753] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] self.f(*self.args, **self.kw) [ 1111.316753] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1111.316753] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] raise exceptions.translate_fault(task_info.error) [ 1111.316753] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1111.316753] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Faults: ['InvalidArgument'] [ 1111.316753] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] [ 1111.316753] env[61649]: INFO nova.compute.manager [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Terminating instance [ 1111.317891] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1111.318103] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1111.318338] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-673632c7-fb7a-40c1-93f9-701da46539ee {{(pid=61649) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.320546] env[61649]: DEBUG nova.compute.manager [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1111.320747] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1111.321458] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-809bd114-2dfa-4b7f-8295-e61b5c3b6801 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.327904] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Unregistering the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1111.328135] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-77915947-5fa6-496d-9f26-7de94e730fb7 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.330324] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1111.330517] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61649) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1111.331469] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aaede2be-2353-459e-b8f8-0102f23140d6 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.336113] env[61649]: DEBUG oslo_vmware.api [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Waiting for the task: (returnval){ [ 1111.336113] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]525d1cda-5802-a780-0650-b797329ba050" [ 1111.336113] env[61649]: _type = "Task" [ 1111.336113] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.343339] env[61649]: DEBUG oslo_vmware.api [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]525d1cda-5802-a780-0650-b797329ba050, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.396476] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Unregistered the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1111.396818] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Deleting contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1111.397067] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Deleting the datastore file [datastore1] 7f9f2074-6822-4d9d-9791-4bebc7e55862 {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1111.397442] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4d40b7e2-79e4-4e1d-ac29-7097484bf664 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.404351] env[61649]: DEBUG oslo_vmware.api [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Waiting for the task: (returnval){ [ 1111.404351] env[61649]: value = "task-158194" [ 1111.404351] env[61649]: _type = "Task" [ 1111.404351] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.412881] env[61649]: DEBUG oslo_vmware.api [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Task: {'id': task-158194, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.846416] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Preparing fetch location {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1111.846416] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Creating directory with path [datastore1] vmware_temp/5d83ad50-744a-4e41-a0ea-1c1e9836f3c4/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1111.846715] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-db29d9f9-1df8-4c69-b254-12a3c56dca8e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.857791] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Created directory with path [datastore1] vmware_temp/5d83ad50-744a-4e41-a0ea-1c1e9836f3c4/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1111.857981] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Fetch image to [datastore1] vmware_temp/5d83ad50-744a-4e41-a0ea-1c1e9836f3c4/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1111.858143] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to [datastore1] vmware_temp/5d83ad50-744a-4e41-a0ea-1c1e9836f3c4/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1111.858873] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dae2358b-27d4-4ad7-bb8d-5c348b424a60 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.865288] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-359eacb5-e516-4d54-a15b-3a3fb0444e2a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.874063] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eacfe516-5c68-4476-8823-10058063bd42 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.908863] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-657d3677-7f4a-460d-aaf0-77140a4f436b {{(pid=61649) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.917530] env[61649]: DEBUG oslo_vmware.api [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Task: {'id': task-158194, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.069529} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.919578] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Deleted the datastore file {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1111.919792] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Deleted contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1111.919963] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1111.920179] env[61649]: INFO nova.compute.manager [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Took 0.60 seconds to destroy the instance on the hypervisor. 
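The traceback above shows the oslo.vmware task pattern that fails here: Nova kicks off CopyVirtualDisk_Task, then blocks in wait_for_task(), whose poller translates the vCenter task error into a VimFaultException (InvalidArgument on fileType). A hedged sketch of that call shape; the endpoint and credentials are placeholders, a live vCenter is required for it to actually run:

from oslo_vmware import api as vmware_api
from oslo_vmware import exceptions as vmware_exc

# Placeholder endpoint/credentials, not the DevStack config used in this log.
session = vmware_api.VMwareAPISession(
    'vc.example.test', 'user', 'secret',
    api_retry_count=2, task_poll_interval=0.5)
try:
    # Same call shape as nova.virt.vmwareapi.vm_util.copy_virtual_disk:
    # start the server-side task, then poll it to completion.
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task',
        session.vim.service_content.virtualDiskManager,
        sourceName='[datastore1] vmware_temp/tmp-sparse.vmdk',
        destName='[datastore1] vmware_temp/copy.vmdk')
    session.wait_for_task(task)
except vmware_exc.VimFaultException as e:
    # wait_for_task re-raises the task error; fault_list carries the
    # vCenter fault names, e.g. ['InvalidArgument'] as in the log.
    print(e.fault_list)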
[ 1111.922410] env[61649]: DEBUG nova.compute.claims [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Aborting claim: {{(pid=61649) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1111.922486] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1111.922631] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1111.924588] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Expecting reply to msg 7756c8663dd249ec84008e2e26694fa6 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1111.926269] env[61649]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-3727af98-ad9e-40b4-8e02-99c3cfd8cd1b {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.946470] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1111.962460] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7756c8663dd249ec84008e2e26694fa6 [ 1111.994980] env[61649]: DEBUG oslo_vmware.rw_handles [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5d83ad50-744a-4e41-a0ea-1c1e9836f3c4/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1112.059800] env[61649]: DEBUG oslo_vmware.rw_handles [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Completed reading data from the image iterator. 
{{(pid=61649) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1112.059990] env[61649]: DEBUG oslo_vmware.rw_handles [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5d83ad50-744a-4e41-a0ea-1c1e9836f3c4/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1112.283150] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79495f8a-160b-479a-88fe-b9bbf721f737 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.290743] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16480d17-10f3-45cd-b7d3-5b9c963fa933 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.320591] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49eba445-7978-472b-95b0-e126cb15cc53 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.327193] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0328e495-8d41-4ef4-b459-834773d9e76b {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.339884] env[61649]: DEBUG nova.compute.provider_tree [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1112.340405] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Expecting reply to msg 3ce1e5fd22c14ef8bb77c088f51b6380 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1112.347551] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3ce1e5fd22c14ef8bb77c088f51b6380 [ 1112.348434] env[61649]: DEBUG nova.scheduler.client.report [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1112.350804] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Expecting reply to msg 
4cbc09bff1064f5ea7302f1d8055c455 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1112.363008] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4cbc09bff1064f5ea7302f1d8055c455 [ 1112.363672] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.441s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1112.364205] env[61649]: ERROR nova.compute.manager [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1112.364205] env[61649]: Faults: ['InvalidArgument'] [ 1112.364205] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Traceback (most recent call last): [ 1112.364205] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1112.364205] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] self.driver.spawn(context, instance, image_meta, [ 1112.364205] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1112.364205] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1112.364205] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1112.364205] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] self._fetch_image_if_missing(context, vi) [ 1112.364205] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1112.364205] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] image_cache(vi, tmp_image_ds_loc) [ 1112.364205] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1112.364788] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] vm_util.copy_virtual_disk( [ 1112.364788] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1112.364788] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] session._wait_for_task(vmdk_copy_task) [ 1112.364788] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1112.364788] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] return self.wait_for_task(task_ref) [ 1112.364788] env[61649]: ERROR 
nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1112.364788] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] return evt.wait() [ 1112.364788] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1112.364788] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] result = hub.switch() [ 1112.364788] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1112.364788] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] return self.greenlet.switch() [ 1112.364788] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1112.364788] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] self.f(*self.args, **self.kw) [ 1112.365340] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1112.365340] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] raise exceptions.translate_fault(task_info.error) [ 1112.365340] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1112.365340] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Faults: ['InvalidArgument'] [ 1112.365340] env[61649]: ERROR nova.compute.manager [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] [ 1112.365340] env[61649]: DEBUG nova.compute.utils [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] VimFaultException {{(pid=61649) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1112.366249] env[61649]: DEBUG nova.compute.manager [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Build of instance 7f9f2074-6822-4d9d-9791-4bebc7e55862 was re-scheduled: A specified parameter was not correct: fileType [ 1112.366249] env[61649]: Faults: ['InvalidArgument'] {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1112.366623] env[61649]: DEBUG nova.compute.manager [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Unplugging VIFs for instance {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1112.366795] env[61649]: DEBUG nova.compute.manager [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Virt driver does not provide unplug_vifs method, so 
it is not possible to determine if VIFs should be unplugged. {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1112.366963] env[61649]: DEBUG nova.compute.manager [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1112.367124] env[61649]: DEBUG nova.network.neutron [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1112.658544] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Expecting reply to msg ee8eba53bc5b41a7b4efb42e0c2f1097 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1112.665197] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ee8eba53bc5b41a7b4efb42e0c2f1097 [ 1112.665737] env[61649]: DEBUG nova.network.neutron [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1112.666193] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Expecting reply to msg 3d9bb81677814440b2f689cc4bcd8161 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1112.680031] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3d9bb81677814440b2f689cc4bcd8161 [ 1112.680031] env[61649]: INFO nova.compute.manager [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Took 0.31 seconds to deallocate network for instance.
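The Acquiring/acquired/released lock lines throughout this log, including the 530.142s build lock released just below, come from oslo.concurrency's synchronized decorator, which logs how long a caller waited for and then held a named lock. A minimal sketch; the lock name matches the log, the function body is illustrative:

from oslo_concurrency import lockutils

# The decorator logs "Acquiring lock ... by ...", "Lock ... acquired ...
# :: waited Ns" and "released ... :: held Ns", the exact lines seen here.
@lockutils.synchronized('compute_resources')
def update_available_resource():
    print('resource view updated under the lock')

update_available_resource()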
[ 1112.681381] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Expecting reply to msg badee0a0f8ec4fa0bd2ce76fd4938972 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1112.722481] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg badee0a0f8ec4fa0bd2ce76fd4938972 [ 1112.723935] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Expecting reply to msg 69f4e32713de4b44bc3206784aae6762 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1112.756656] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 69f4e32713de4b44bc3206784aae6762 [ 1112.776117] env[61649]: INFO nova.scheduler.client.report [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Deleted allocations for instance 7f9f2074-6822-4d9d-9791-4bebc7e55862 [ 1112.787598] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Expecting reply to msg 7c1ddad95c1140659e9092974e2740c4 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1112.797825] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7c1ddad95c1140659e9092974e2740c4 [ 1112.798405] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5c57a1a7-fbc7-4c15-8188-ee04fc8bbe9b tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Lock "7f9f2074-6822-4d9d-9791-4bebc7e55862" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 530.142s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1112.798869] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-77d8f494-4ca1-4293-a5c4-ebde1bfc3c98 tempest-ServerActionsV293TestJSON-761580856 tempest-ServerActionsV293TestJSON-761580856-project-member] Expecting reply to msg 767ee09f96624799b203caabf7755603 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1112.799614] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d31b6b3b-a78c-450d-8c88-df36c6e81767 tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Lock "7f9f2074-6822-4d9d-9791-4bebc7e55862" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 331.418s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1112.799844] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d31b6b3b-a78c-450d-8c88-df36c6e81767 tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Acquiring lock "7f9f2074-6822-4d9d-9791-4bebc7e55862-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1112.800045] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d31b6b3b-a78c-450d-8c88-df36c6e81767 tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Lock 
"7f9f2074-6822-4d9d-9791-4bebc7e55862-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1112.800196] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d31b6b3b-a78c-450d-8c88-df36c6e81767 tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Lock "7f9f2074-6822-4d9d-9791-4bebc7e55862-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1112.802161] env[61649]: INFO nova.compute.manager [None req-d31b6b3b-a78c-450d-8c88-df36c6e81767 tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Terminating instance [ 1112.803795] env[61649]: DEBUG nova.compute.manager [None req-d31b6b3b-a78c-450d-8c88-df36c6e81767 tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1112.803988] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-d31b6b3b-a78c-450d-8c88-df36c6e81767 tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1112.804474] env[61649]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d8ea3453-37a2-4f10-8bcc-860db443b563 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.813133] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90aa39f6-35cd-4568-8761-f41cf6bd7f5d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.823318] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 767ee09f96624799b203caabf7755603 [ 1112.823814] env[61649]: DEBUG nova.compute.manager [None req-77d8f494-4ca1-4293-a5c4-ebde1bfc3c98 tempest-ServerActionsV293TestJSON-761580856 tempest-ServerActionsV293TestJSON-761580856-project-member] [instance: 3e1127c5-876f-47b9-b652-7a558711a1a7] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1112.825624] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-77d8f494-4ca1-4293-a5c4-ebde1bfc3c98 tempest-ServerActionsV293TestJSON-761580856 tempest-ServerActionsV293TestJSON-761580856-project-member] Expecting reply to msg 2e6fd19c30814c098b2906d28fcb8298 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1112.844395] env[61649]: WARNING nova.virt.vmwareapi.vmops [None req-d31b6b3b-a78c-450d-8c88-df36c6e81767 tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7f9f2074-6822-4d9d-9791-4bebc7e55862 could not be found. 
[ 1112.844504] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-d31b6b3b-a78c-450d-8c88-df36c6e81767 tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1112.844592] env[61649]: INFO nova.compute.manager [None req-d31b6b3b-a78c-450d-8c88-df36c6e81767 tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1112.844843] env[61649]: DEBUG oslo.service.loopingcall [None req-d31b6b3b-a78c-450d-8c88-df36c6e81767 tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1112.845061] env[61649]: DEBUG nova.compute.manager [-] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1112.845156] env[61649]: DEBUG nova.network.neutron [-] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1112.848910] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2e6fd19c30814c098b2906d28fcb8298 [ 1112.849441] env[61649]: DEBUG nova.compute.manager [None req-77d8f494-4ca1-4293-a5c4-ebde1bfc3c98 tempest-ServerActionsV293TestJSON-761580856 tempest-ServerActionsV293TestJSON-761580856-project-member] [instance: 3e1127c5-876f-47b9-b652-7a558711a1a7] Instance disappeared before build. 
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1112.849776] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-77d8f494-4ca1-4293-a5c4-ebde1bfc3c98 tempest-ServerActionsV293TestJSON-761580856 tempest-ServerActionsV293TestJSON-761580856-project-member] Expecting reply to msg a9a6eea5db7245f8918e00e5e23b4630 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1112.858879] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a9a6eea5db7245f8918e00e5e23b4630 [ 1112.884071] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg daf61f9b10fa415cadcc8c12b2c516db in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1112.884071] env[61649]: DEBUG oslo_concurrency.lockutils [None req-77d8f494-4ca1-4293-a5c4-ebde1bfc3c98 tempest-ServerActionsV293TestJSON-761580856 tempest-ServerActionsV293TestJSON-761580856-project-member] Lock "3e1127c5-876f-47b9-b652-7a558711a1a7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 221.161s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1112.884071] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 4be19df86fb9440491bdbe24194e0bd2 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1112.887234] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg daf61f9b10fa415cadcc8c12b2c516db [ 1112.887656] env[61649]: DEBUG nova.network.neutron [-] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1112.888058] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg cb595d4aa47046bda2de2aaceae7b403 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1112.892604] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4be19df86fb9440491bdbe24194e0bd2 [ 1112.892996] env[61649]: DEBUG nova.compute.manager [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1112.894633] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg e47684801bb04b1f81d18c232c46c494 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1112.895985] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cb595d4aa47046bda2de2aaceae7b403 [ 1112.896616] env[61649]: INFO nova.compute.manager [-] [instance: 7f9f2074-6822-4d9d-9791-4bebc7e55862] Took 0.05 seconds to deallocate network for instance. 
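The "Waiting for function ... _deallocate_network_with_retries to return" line above is oslo.service's looping-call machinery (loopingcall.py): a worker is re-invoked on an interval until it raises LoopingCallDone. A runnable sketch of that mechanism with an invented worker that succeeds on the third try; the real worker retries Neutron deallocation:

from oslo_service import loopingcall

attempts = []

def _deallocate():
    # Invented retry body; raising LoopingCallDone stops the loop.
    attempts.append(1)
    if len(attempts) >= 3:
        raise loopingcall.LoopingCallDone()

timer = loopingcall.FixedIntervalLoopingCall(_deallocate)
timer.start(interval=0.1).wait()  # blocks until LoopingCallDone
print('deallocated after', len(attempts), 'tries')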
[ 1112.900242] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d31b6b3b-a78c-450d-8c88-df36c6e81767 tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Expecting reply to msg e84dd5a99fa34b828bfef03fb8fa0514 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1112.925906] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e84dd5a99fa34b828bfef03fb8fa0514 [ 1112.932214] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e47684801bb04b1f81d18c232c46c494 [ 1112.943670] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d31b6b3b-a78c-450d-8c88-df36c6e81767 tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Expecting reply to msg 6bd3bbe8d2f14884bf2785e71bf56367 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1112.950431] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1112.952123] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.002s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1112.953973] env[61649]: INFO nova.compute.claims [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1112.955626] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 003dfa644d504238bd9816f6e97c7ff2 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1112.977730] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6bd3bbe8d2f14884bf2785e71bf56367 [ 1112.981502] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d31b6b3b-a78c-450d-8c88-df36c6e81767 tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Lock "7f9f2074-6822-4d9d-9791-4bebc7e55862" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.182s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1112.982072] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d31b6b3b-a78c-450d-8c88-df36c6e81767 tempest-ServerActionsTestJSON-787621936 tempest-ServerActionsTestJSON-787621936-project-member] Expecting reply to msg 78a4d8bd512f419892f7bb13c67fe52a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1112.994703] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 78a4d8bd512f419892f7bb13c67fe52a [ 1112.995527] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 003dfa644d504238bd9816f6e97c7ff2 [ 1112.997118] 
env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 054b245246d6433090d24288456062b9 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1113.007053] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 054b245246d6433090d24288456062b9 [ 1113.097064] env[61649]: DEBUG oslo_concurrency.lockutils [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Acquiring lock "5f67180f-6b27-4487-8858-5f57fcffd041" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1113.097298] env[61649]: DEBUG oslo_concurrency.lockutils [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Lock "5f67180f-6b27-4487-8858-5f57fcffd041" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1113.277304] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d34218d8-0464-4d95-8457-9fec79dde7b0 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.284669] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ffc7fc1-cee1-4eaf-958e-4607bdeee4ab {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.313145] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47437e44-ffc7-4199-bf50-8e01e9846b1f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.319819] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e829c2a-6ff0-4fc2-95bd-eb6e0ecb9179 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.333186] env[61649]: DEBUG nova.compute.provider_tree [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1113.333619] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 2ff6eb53c2d74552ba14a9cfd7701771 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1113.340958] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2ff6eb53c2d74552ba14a9cfd7701771 [ 1113.341798] env[61649]: DEBUG nova.scheduler.client.report [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Inventory has not changed for provider 
dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1113.343942] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg eb48bd91a9274e2097f16e13106e2c8b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1113.356380] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eb48bd91a9274e2097f16e13106e2c8b [ 1113.357215] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.405s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1113.357666] env[61649]: DEBUG nova.compute.manager [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Start building networks asynchronously for instance. {{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1113.359208] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg f7c027be4f144ed18086d5df5ce55e74 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1113.388572] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f7c027be4f144ed18086d5df5ce55e74 [ 1113.389916] env[61649]: DEBUG nova.compute.utils [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Using /dev/sd instead of None {{(pid=61649) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1113.390478] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg d1c7d57ac9474c4ea4c4ea69aac0d682 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1113.391194] env[61649]: DEBUG nova.compute.manager [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Allocating IP information in the background. 
{{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1113.391359] env[61649]: DEBUG nova.network.neutron [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] allocate_for_instance() {{(pid=61649) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1113.399112] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d1c7d57ac9474c4ea4c4ea69aac0d682 [ 1113.399607] env[61649]: DEBUG nova.compute.manager [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Start building block device mappings for instance. {{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1113.401110] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg b74b35b9485e43699b63235b524912e5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1113.428548] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b74b35b9485e43699b63235b524912e5 [ 1113.431037] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 1ce85561c05f4ce1a8eb4c25255c80c6 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1113.434177] env[61649]: DEBUG nova.policy [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4eecfef918474dc8ad298d9eb189f56f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3939f446f6f04aa08a0b91101e55572b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61649) authorize /opt/stack/nova/nova/policy.py:203}} [ 1113.457788] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1ce85561c05f4ce1a8eb4c25255c80c6 [ 1113.458778] env[61649]: DEBUG nova.compute.manager [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Start spawning the instance on the hypervisor. 
{{(pid=61649) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1113.480417] env[61649]: DEBUG nova.virt.hardware [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-04-02T10:03:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-04-02T10:03:42Z,direct_url=,disk_format='vmdk',id=d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d9c1bd4c77004c3cb8e42232cad1896c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-04-02T10:03:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1113.480655] env[61649]: DEBUG nova.virt.hardware [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Flavor limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1113.480811] env[61649]: DEBUG nova.virt.hardware [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Image limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1113.480990] env[61649]: DEBUG nova.virt.hardware [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Flavor pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1113.481137] env[61649]: DEBUG nova.virt.hardware [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Image pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1113.481315] env[61649]: DEBUG nova.virt.hardware [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1113.481480] env[61649]: DEBUG nova.virt.hardware [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1113.481642] env[61649]: DEBUG nova.virt.hardware [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1113.481960] env[61649]: DEBUG nova.virt.hardware [None 
req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Got 1 possible topologies {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1113.482158] env[61649]: DEBUG nova.virt.hardware [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1113.482336] env[61649]: DEBUG nova.virt.hardware [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1113.483207] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26b3d233-3dd8-409a-81f3-d8c3318effb6 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.491111] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3877a10-4f2b-4e44-842c-9225c85ac889 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.739204] env[61649]: DEBUG nova.network.neutron [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Successfully created port: cf26202b-41b4-4bff-9c95-06665d78a98f {{(pid=61649) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1114.338663] env[61649]: DEBUG nova.network.neutron [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Successfully updated port: cf26202b-41b4-4bff-9c95-06665d78a98f {{(pid=61649) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1114.338663] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg dc7db867df5342a581f894d847bec709 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1114.347146] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dc7db867df5342a581f894d847bec709 [ 1114.347848] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquiring lock "refresh_cache-5dc4bde6-db61-47c2-a2b8-d2a5515b1525" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1114.347995] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquired lock "refresh_cache-5dc4bde6-db61-47c2-a2b8-d2a5515b1525" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1114.348157] env[61649]: DEBUG nova.network.neutron [None 
req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Building network info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1114.348532] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 253e2e63b1c54fe1802869f53b7dbde4 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1114.355757] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 253e2e63b1c54fe1802869f53b7dbde4 [ 1114.385034] env[61649]: DEBUG nova.network.neutron [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Instance cache missing network info. {{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1114.518563] env[61649]: DEBUG nova.network.neutron [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Updating instance_info_cache with network_info: [{"id": "cf26202b-41b4-4bff-9c95-06665d78a98f", "address": "fa:16:3e:a0:c4:f8", "network": {"id": "e70549f5-8c32-456f-9488-fdfff63b2dc8", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2020544300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3939f446f6f04aa08a0b91101e55572b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b29df12-5674-476d-a9e5-5e20f704d224", "external-id": "nsx-vlan-transportzone-754", "segmentation_id": 754, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf26202b-41", "ovs_interfaceid": "cf26202b-41b4-4bff-9c95-06665d78a98f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1114.519086] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 181464bdba824239bc8bf46d8ab223fd in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1114.531194] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 181464bdba824239bc8bf46d8ab223fd [ 1114.531803] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Releasing lock "refresh_cache-5dc4bde6-db61-47c2-a2b8-d2a5515b1525" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1114.532096] env[61649]: 
DEBUG nova.compute.manager [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Instance network_info: |[{"id": "cf26202b-41b4-4bff-9c95-06665d78a98f", "address": "fa:16:3e:a0:c4:f8", "network": {"id": "e70549f5-8c32-456f-9488-fdfff63b2dc8", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2020544300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3939f446f6f04aa08a0b91101e55572b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b29df12-5674-476d-a9e5-5e20f704d224", "external-id": "nsx-vlan-transportzone-754", "segmentation_id": 754, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf26202b-41", "ovs_interfaceid": "cf26202b-41b4-4bff-9c95-06665d78a98f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1114.532503] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a0:c4:f8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8b29df12-5674-476d-a9e5-5e20f704d224', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cf26202b-41b4-4bff-9c95-06665d78a98f', 'vif_model': 'vmxnet3'}] {{(pid=61649) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1114.539933] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Creating folder: Project (3939f446f6f04aa08a0b91101e55572b). Parent ref: group-v51588. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1114.540431] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0b67f2f1-06fd-4afe-acc7-1a0cedd3c339 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.553372] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Created folder: Project (3939f446f6f04aa08a0b91101e55572b) in parent group-v51588. [ 1114.553554] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Creating folder: Instances. Parent ref: group-v51655. 
{{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1114.553777] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7222c656-c8b9-4ea8-b733-d57c199a3d1b {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.562350] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Created folder: Instances in parent group-v51655. [ 1114.562571] env[61649]: DEBUG oslo.service.loopingcall [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1114.562743] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Creating VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1114.562924] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b47b2e71-e079-46e2-9897-39a92a7fa560 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.581109] env[61649]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1114.581109] env[61649]: value = "task-158197" [ 1114.581109] env[61649]: _type = "Task" [ 1114.581109] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.588965] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158197, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.757403] env[61649]: DEBUG nova.compute.manager [req-1598e232-b059-4748-845a-aa01e1e33b2e req-6f509842-a61f-4d94-a801-766d818e52bd service nova] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Received event network-vif-plugged-cf26202b-41b4-4bff-9c95-06665d78a98f {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1114.757630] env[61649]: DEBUG oslo_concurrency.lockutils [req-1598e232-b059-4748-845a-aa01e1e33b2e req-6f509842-a61f-4d94-a801-766d818e52bd service nova] Acquiring lock "5dc4bde6-db61-47c2-a2b8-d2a5515b1525-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1114.757853] env[61649]: DEBUG oslo_concurrency.lockutils [req-1598e232-b059-4748-845a-aa01e1e33b2e req-6f509842-a61f-4d94-a801-766d818e52bd service nova] Lock "5dc4bde6-db61-47c2-a2b8-d2a5515b1525-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1114.758026] env[61649]: DEBUG oslo_concurrency.lockutils [req-1598e232-b059-4748-845a-aa01e1e33b2e req-6f509842-a61f-4d94-a801-766d818e52bd service nova] Lock "5dc4bde6-db61-47c2-a2b8-d2a5515b1525-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1114.758212] env[61649]: DEBUG nova.compute.manager [req-1598e232-b059-4748-845a-aa01e1e33b2e req-6f509842-a61f-4d94-a801-766d818e52bd service nova] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] No waiting events found dispatching network-vif-plugged-cf26202b-41b4-4bff-9c95-06665d78a98f {{(pid=61649) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1114.758391] env[61649]: WARNING nova.compute.manager [req-1598e232-b059-4748-845a-aa01e1e33b2e req-6f509842-a61f-4d94-a801-766d818e52bd service nova] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Received unexpected event network-vif-plugged-cf26202b-41b4-4bff-9c95-06665d78a98f for instance with vm_state building and task_state spawning. [ 1114.758563] env[61649]: DEBUG nova.compute.manager [req-1598e232-b059-4748-845a-aa01e1e33b2e req-6f509842-a61f-4d94-a801-766d818e52bd service nova] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Received event network-changed-cf26202b-41b4-4bff-9c95-06665d78a98f {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1114.758731] env[61649]: DEBUG nova.compute.manager [req-1598e232-b059-4748-845a-aa01e1e33b2e req-6f509842-a61f-4d94-a801-766d818e52bd service nova] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Refreshing instance network info cache due to event network-changed-cf26202b-41b4-4bff-9c95-06665d78a98f.
{{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 1114.758924] env[61649]: DEBUG oslo_concurrency.lockutils [req-1598e232-b059-4748-845a-aa01e1e33b2e req-6f509842-a61f-4d94-a801-766d818e52bd service nova] Acquiring lock "refresh_cache-5dc4bde6-db61-47c2-a2b8-d2a5515b1525" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1114.759062] env[61649]: DEBUG oslo_concurrency.lockutils [req-1598e232-b059-4748-845a-aa01e1e33b2e req-6f509842-a61f-4d94-a801-766d818e52bd service nova] Acquired lock "refresh_cache-5dc4bde6-db61-47c2-a2b8-d2a5515b1525" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1114.759235] env[61649]: DEBUG nova.network.neutron [req-1598e232-b059-4748-845a-aa01e1e33b2e req-6f509842-a61f-4d94-a801-766d818e52bd service nova] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Refreshing network info cache for port cf26202b-41b4-4bff-9c95-06665d78a98f {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 1114.759715] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-1598e232-b059-4748-845a-aa01e1e33b2e req-6f509842-a61f-4d94-a801-766d818e52bd service nova] Expecting reply to msg 8d64db4d387042b59befd0e9dd0ca86a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1114.766922] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8d64db4d387042b59befd0e9dd0ca86a [ 1115.033497] env[61649]: DEBUG nova.network.neutron [req-1598e232-b059-4748-845a-aa01e1e33b2e req-6f509842-a61f-4d94-a801-766d818e52bd service nova] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Updated VIF entry in instance network info cache for port cf26202b-41b4-4bff-9c95-06665d78a98f. 
{{(pid=61649) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 1115.033859] env[61649]: DEBUG nova.network.neutron [req-1598e232-b059-4748-845a-aa01e1e33b2e req-6f509842-a61f-4d94-a801-766d818e52bd service nova] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Updating instance_info_cache with network_info: [{"id": "cf26202b-41b4-4bff-9c95-06665d78a98f", "address": "fa:16:3e:a0:c4:f8", "network": {"id": "e70549f5-8c32-456f-9488-fdfff63b2dc8", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2020544300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3939f446f6f04aa08a0b91101e55572b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b29df12-5674-476d-a9e5-5e20f704d224", "external-id": "nsx-vlan-transportzone-754", "segmentation_id": 754, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf26202b-41", "ovs_interfaceid": "cf26202b-41b4-4bff-9c95-06665d78a98f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1115.034369] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-1598e232-b059-4748-845a-aa01e1e33b2e req-6f509842-a61f-4d94-a801-766d818e52bd service nova] Expecting reply to msg b9913dc519d748cc96899d656c0d3a41 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1115.042676] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b9913dc519d748cc96899d656c0d3a41 [ 1115.043232] env[61649]: DEBUG oslo_concurrency.lockutils [req-1598e232-b059-4748-845a-aa01e1e33b2e req-6f509842-a61f-4d94-a801-766d818e52bd service nova] Releasing lock "refresh_cache-5dc4bde6-db61-47c2-a2b8-d2a5515b1525" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1115.091991] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158197, 'name': CreateVM_Task} progress is 25%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.590976] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158197, 'name': CreateVM_Task, 'duration_secs': 0.835951} completed successfully. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.591261] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Created VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1115.591815] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1115.591973] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1115.592300] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1115.592541] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed388f2b-05a1-453b-adf2-fb643c2b4e6d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.596494] env[61649]: DEBUG oslo_vmware.api [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Waiting for the task: (returnval){ [ 1115.596494] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52013e5c-ea1a-7422-899d-548254a5d6ce" [ 1115.596494] env[61649]: _type = "Task" [ 1115.596494] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.603297] env[61649]: DEBUG oslo_vmware.api [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52013e5c-ea1a-7422-899d-548254a5d6ce, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.107332] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1116.107589] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Processing image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1116.107808] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1125.929476] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1125.929846] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg a887f998dad14c539c648828d056593e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1125.935620] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a887f998dad14c539c648828d056593e [ 1126.935507] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1127.929759] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1127.929925] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Cleaning up deleted instances with incomplete migration {{(pid=61649) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11255}} [ 1127.930272] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 69adee1ac0b143e18a93c542a86054fa in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1127.936988] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 69adee1ac0b143e18a93c542a86054fa [ 1127.986071] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._sync_power_states {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1127.986647] 
env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg a9a43101438f4897b7469a6ce9e382f3 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1128.004518] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a9a43101438f4897b7469a6ce9e382f3 [ 1128.006795] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Getting list of instances from cluster (obj){ [ 1128.006795] env[61649]: value = "domain-c8" [ 1128.006795] env[61649]: _type = "ClusterComputeResource" [ 1128.006795] env[61649]: } {{(pid=61649) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1128.008066] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-964c0d2f-0ea3-41eb-ab48-04ace6511b14 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.025012] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Got total of 10 instances {{(pid=61649) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1128.025182] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Triggering sync for uuid bf8c692f-6510-4548-aedd-0e1792512e20 {{(pid=61649) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 1128.025373] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Triggering sync for uuid e5fe92cf-e150-419f-a164-a98a9d24dd8c {{(pid=61649) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 1128.025533] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Triggering sync for uuid 95426048-d403-4dad-9ad7-b76de655a319 {{(pid=61649) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 1128.025727] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Triggering sync for uuid a0db1e96-4ca4-4fed-b86b-d8457f3570a9 {{(pid=61649) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 1128.025827] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Triggering sync for uuid 29f84900-0805-4ab2-af4d-bd7be2ac94d3 {{(pid=61649) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 1128.025975] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Triggering sync for uuid bf2399eb-b2df-43b3-bddd-48692825c40a {{(pid=61649) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 1128.026118] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Triggering sync for uuid 6ab197e9-3e38-4b37-b625-c30b6977261a {{(pid=61649) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 1128.026260] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Triggering sync for uuid c9fe1bfe-e813-43e9-9668-b813416ee27b {{(pid=61649) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 1128.026403] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Triggering sync for uuid 9cdd96c2-2837-4cb3-855c-ecad727dd5d4 {{(pid=61649) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 1128.026544] env[61649]: DEBUG 
nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Triggering sync for uuid 5dc4bde6-db61-47c2-a2b8-d2a5515b1525 {{(pid=61649) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 1128.026839] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "bf8c692f-6510-4548-aedd-0e1792512e20" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1128.027061] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "e5fe92cf-e150-419f-a164-a98a9d24dd8c" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1128.027279] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "95426048-d403-4dad-9ad7-b76de655a319" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1128.027500] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "a0db1e96-4ca4-4fed-b86b-d8457f3570a9" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1128.027717] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "29f84900-0805-4ab2-af4d-bd7be2ac94d3" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1128.027950] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "bf2399eb-b2df-43b3-bddd-48692825c40a" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1128.028124] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "6ab197e9-3e38-4b37-b625-c30b6977261a" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1128.028334] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "c9fe1bfe-e813-43e9-9668-b813416ee27b" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1128.028537] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "9cdd96c2-2837-4cb3-855c-ecad727dd5d4" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=61649)
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1128.028738] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "5dc4bde6-db61-47c2-a2b8-d2a5515b1525" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1129.972599] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1129.972866] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1130.924594] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1130.925560] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 55948f678d734efa89c40c71e7065d70 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1130.942985] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 55948f678d734efa89c40c71e7065d70 [ 1130.946680] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1130.946680] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1130.946680] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] CONF.reclaim_instance_interval <= 0, skipping...
{{(pid=61649) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 1130.979585] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 58c3aaf39e2246ebb5852570f28d59ac in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1130.988410] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 58c3aaf39e2246ebb5852570f28d59ac [ 1131.929677] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1131.929932] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1131.930072] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Starting heal instance info cache {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 1131.930188] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Rebuilding the list of instances to heal {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 1131.931181] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 8e75a03d3631495b8b4d174193ddb80c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1131.953142] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8e75a03d3631495b8b4d174193ddb80c [ 1131.956031] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1131.956031] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1131.956031] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1131.956031] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1131.956031] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Skipping network cache update for instance because it is Building. 
{{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1131.956382] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1131.956382] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1131.956382] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1131.956382] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1131.956596] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1131.956685] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Didn't find any instances for network info cache update. 
{{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 1131.957197] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1132.930763] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1132.931150] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 77f1060d86ce424dac0df8cd5be8a5f8 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1132.940905] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 77f1060d86ce424dac0df8cd5be8a5f8 [ 1132.941867] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1132.942060] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1132.942216] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1132.942362] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61649) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1132.943406] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69b0e5f9-7165-4405-b453-82bf215dd62d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.951971] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6466d55-17be-446f-b185-4afb1898a03a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.965657] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad3e2b43-3d14-4ca6-b1f7-74fd3cd80c4b {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.971761] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61dfd524-fb0d-4498-99c6-1188c8d0c9ac {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.000036] env[61649]: DEBUG 
nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181801MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61649) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 1133.000191] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1133.000355] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1133.001177] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 436056285816440283756ddd33b72233 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1133.036276] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 436056285816440283756ddd33b72233
[ 1133.040660] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg cf616c3aa3754a00b215a4bf05a2db7a in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1133.048761] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cf616c3aa3754a00b215a4bf05a2db7a
[ 1133.068667] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance bf8c692f-6510-4548-aedd-0e1792512e20 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1133.068870] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance e5fe92cf-e150-419f-a164-a98a9d24dd8c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1133.069035] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 95426048-d403-4dad-9ad7-b76de655a319 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1133.069190] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance a0db1e96-4ca4-4fed-b86b-d8457f3570a9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1133.069395] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 29f84900-0805-4ab2-af4d-bd7be2ac94d3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1133.069558] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance bf2399eb-b2df-43b3-bddd-48692825c40a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1133.069710] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 6ab197e9-3e38-4b37-b625-c30b6977261a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1133.069862] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance c9fe1bfe-e813-43e9-9668-b813416ee27b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1133.070009] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 9cdd96c2-2837-4cb3-855c-ecad727dd5d4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1133.070155] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 5dc4bde6-db61-47c2-a2b8-d2a5515b1525 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1133.070727] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 724fd7cbd944467cb7111c4b2a010d22 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1133.080932] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 724fd7cbd944467cb7111c4b2a010d22
[ 1133.081622] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 5730229a-fd0c-4df1-9059-cd6ed39e954c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1133.082125] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg cba3a9496b964fe5a75de1575a395881 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1133.091199] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cba3a9496b964fe5a75de1575a395881
[ 1133.091891] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 0534f500-d8d8-4aad-896c-c965778c3a6f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1133.092403] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg d9a218f87a5945c791c4fb54cb896fff in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1133.101536] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d9a218f87a5945c791c4fb54cb896fff
[ 1133.102199] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 6d3ee887-6b6b-4199-aea6-f0de0153e5c5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1133.102949] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg aba5902ecadb454cbec3b4fa860eec59 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1133.112025] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aba5902ecadb454cbec3b4fa860eec59
[ 1133.112373] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 82ae439e-5fe0-4bed-b550-e34929c139f8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1133.112794] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 483a72f0a27643d8bea325925619e159 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1133.121077] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 483a72f0a27643d8bea325925619e159
[ 1133.121789] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 40b216ba-3afd-4cfe-b98b-c5de03501317 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1133.122201] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 546245cd319e47fc9a73247f7307a3ac in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1133.130536] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 546245cd319e47fc9a73247f7307a3ac
[ 1133.131160] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance a12bff02-f7da-43a1-b614-beb3d6908e0b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1133.131548] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 2a879f67bd3b40e0b0690468109f59ee in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1133.139467] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2a879f67bd3b40e0b0690468109f59ee
[ 1133.140088] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 2b4cc40e-a6fc-48df-baaf-f74352c24408 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1133.140490] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg a194135e7f944f71b3999b2afb194cca in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1133.148667] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a194135e7f944f71b3999b2afb194cca
[ 1133.149326] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 0b0050ff-2714-4068-9956-089c6aa3eff1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1133.149739] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 935f98942200464a99f2211c7a077581 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1133.159017] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 935f98942200464a99f2211c7a077581
[ 1133.159712] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 71c15bd8-5786-4d44-aa0e-3249b272ac72 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1133.160137] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg bc555d43e01a4841b1190d18561f279f in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1133.168765] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bc555d43e01a4841b1190d18561f279f
[ 1133.169396] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance d7adbc9f-af82-4f99-8536-4411665e3233 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1133.169912] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 6106c060602a4e97ab5ad9abe30f7579 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1133.178727] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6106c060602a4e97ab5ad9abe30f7579
[ 1133.179378] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance c7a35269-b314-4381-a8b4-d509d5627861 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1133.179814] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 39b7976a98944b7d96c2cf86a2ccf87f in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1133.188647] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 39b7976a98944b7d96c2cf86a2ccf87f
[ 1133.189287] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 0fb0aaae-b6d2-418d-81a9-74671f4b97c6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1133.189740] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg f783c20ea4b843f4a92ee5aa1cd46b1e in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1133.199194] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f783c20ea4b843f4a92ee5aa1cd46b1e
[ 1133.199831] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 5f67180f-6b27-4487-8858-5f57fcffd041 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1133.200069] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 1133.200226] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 1133.466624] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9408a59-5bc4-47bf-852a-89aad51c8789 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1133.473807] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebba621f-52a5-4475-a3ba-65b698c4e3fe {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1133.501986] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20ad4f55-8651-45d5-bfda-179adb3a37dc {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1133.508749] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24112154-31a7-4c6e-834a-9524eae5d6d7 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1133.521495] env[61649]: DEBUG nova.compute.provider_tree [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1133.522027] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 17183dd071ad4687a5ea6d4dd1b4f35e in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1133.528961] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 17183dd071ad4687a5ea6d4dd1b4f35e
[ 1133.529883] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1133.532139] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg e9049e0e3a2045aab3eccdeaec0e5a9b in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1133.542658] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e9049e0e3a2045aab3eccdeaec0e5a9b
[ 1133.543346] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61649) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 1133.543587] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.543s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1133.543850] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1133.544053] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Cleaning up deleted instances {{(pid=61649) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11217}}
[ 1133.544632] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 60f875436e964fa4a43d0583e953918b in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1133.551895] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 60f875436e964fa4a43d0583e953918b
[ 1133.552369] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] There are 0 instances to clean {{(pid=61649) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11226}}
[ 1160.067795] env[61649]: WARNING oslo_vmware.rw_handles [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 1160.067795] env[61649]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 1160.067795] env[61649]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 1160.067795] env[61649]: ERROR oslo_vmware.rw_handles self._conn.getresponse()
[ 1160.067795] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 1160.067795] env[61649]: ERROR oslo_vmware.rw_handles response.begin()
[ 1160.067795] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 1160.067795] env[61649]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status()
[ 1160.067795] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 1160.067795] env[61649]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without"
[ 1160.067795] env[61649]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 1160.067795] env[61649]: ERROR oslo_vmware.rw_handles
[ 1160.068836] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Downloaded image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to vmware_temp/5d83ad50-744a-4e41-a0ea-1c1e9836f3c4/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 1160.070325] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Caching image {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 1160.070562] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Copying Virtual Disk [datastore1] vmware_temp/5d83ad50-744a-4e41-a0ea-1c1e9836f3c4/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk to [datastore1] vmware_temp/5d83ad50-744a-4e41-a0ea-1c1e9836f3c4/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk {{(pid=61649) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 1160.070850] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1d506a26-f2bf-493b-9271-2e00ab8c0c7e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1160.079675] env[61649]: DEBUG oslo_vmware.api [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Waiting for the task: (returnval){
[ 1160.079675] env[61649]: value = "task-158198"
[ 1160.079675] env[61649]: _type = "Task"
[ 1160.079675] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1160.087194] env[61649]: DEBUG oslo_vmware.api [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Task: {'id': task-158198, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1160.589986] env[61649]: DEBUG oslo_vmware.exceptions [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Fault InvalidArgument not matched. {{(pid=61649) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 1160.590302] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1160.590890] env[61649]: ERROR nova.compute.manager [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1160.590890] env[61649]: Faults: ['InvalidArgument']
[ 1160.590890] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Traceback (most recent call last):
[ 1160.590890] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources
[ 1160.590890] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] yield resources
[ 1160.590890] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1160.590890] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] self.driver.spawn(context, instance, image_meta,
[ 1160.590890] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1160.590890] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1160.590890] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1160.590890] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] self._fetch_image_if_missing(context, vi)
[ 1160.590890] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1160.591266] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] image_cache(vi, tmp_image_ds_loc)
[ 1160.591266] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1160.591266] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] vm_util.copy_virtual_disk(
[ 1160.591266] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1160.591266] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] session._wait_for_task(vmdk_copy_task)
[ 1160.591266] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1160.591266] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] return self.wait_for_task(task_ref)
[ 1160.591266] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1160.591266] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] return evt.wait()
[ 1160.591266] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1160.591266] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] result = hub.switch()
[ 1160.591266] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1160.591266] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] return self.greenlet.switch()
[ 1160.591705] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1160.591705] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] self.f(*self.args, **self.kw)
[ 1160.591705] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1160.591705] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] raise exceptions.translate_fault(task_info.error)
[ 1160.591705] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1160.591705] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Faults: ['InvalidArgument']
[ 1160.591705] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20]
[ 1160.591705] env[61649]: INFO nova.compute.manager [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Terminating instance
[ 1160.592852] env[61649]: DEBUG oslo_concurrency.lockutils [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1160.593070] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1160.593304] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fde4cd0b-cf06-4631-a658-c6f51640da3b {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1160.595722] env[61649]: DEBUG nova.compute.manager [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 1160.595913] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1160.596653] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec295b45-0e91-4c40-a4f6-fbc3eef2551c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1160.603164] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Unregistering the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 1160.603420] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1c588eae-40fc-4517-a633-e7308d81947b {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1160.605764] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1160.605942] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61649) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 1160.606874] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-58945a52-7341-41dd-81ed-eced4396655c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1160.611672] env[61649]: DEBUG oslo_vmware.api [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Waiting for the task: (returnval){
[ 1160.611672] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52985fd0-93b7-8248-df78-62b5c0b77c6f"
[ 1160.611672] env[61649]: _type = "Task"
[ 1160.611672] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1160.618323] env[61649]: DEBUG oslo_vmware.api [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52985fd0-93b7-8248-df78-62b5c0b77c6f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1161.121698] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Preparing fetch location {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 1161.122050] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Creating directory with path [datastore1] vmware_temp/19eccc88-4aff-4138-b06b-094b93b631a7/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1161.122450] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-582b3c38-cdc3-4ce2-b330-b35b4d65d8a6 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1161.136036] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Created directory with path [datastore1] vmware_temp/19eccc88-4aff-4138-b06b-094b93b631a7/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1161.136036] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Fetch image to [datastore1] vmware_temp/19eccc88-4aff-4138-b06b-094b93b631a7/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 1161.136036] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to [datastore1] vmware_temp/19eccc88-4aff-4138-b06b-094b93b631a7/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 1161.136036] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2e73e36-322a-4f9f-bc90-371831b813c9 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1161.141585] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3865dd82-3ad6-4c03-882d-14527798fb06 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1161.150400] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af579f59-24af-4617-979d-e9e43e19c111 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1161.181535] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e372837-c854-4596-8b6d-f3cae3d8e82d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1161.187692] env[61649]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-1062b6a8-87d1-4345-8d09-c765aa9b99fa {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1161.213979] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 1161.315329] env[61649]: DEBUG oslo_vmware.rw_handles [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/19eccc88-4aff-4138-b06b-094b93b631a7/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 1161.374391] env[61649]: DEBUG oslo_vmware.rw_handles [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Completed reading data from the image iterator. {{(pid=61649) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 1161.374605] env[61649]: DEBUG oslo_vmware.rw_handles [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/19eccc88-4aff-4138-b06b-094b93b631a7/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 1164.556041] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Unregistered the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 1164.556309] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Deleting contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 1164.556516] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Deleting the datastore file [datastore1] bf8c692f-6510-4548-aedd-0e1792512e20 {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1164.556782] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-838e3e4e-d37e-4821-ac40-412c388dddf3 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1164.563561] env[61649]: DEBUG oslo_vmware.api [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Waiting for the task: (returnval){
[ 1164.563561] env[61649]: value = "task-158200"
[ 1164.563561] env[61649]: _type = "Task"
[ 1164.563561] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1164.570990] env[61649]: DEBUG oslo_vmware.api [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Task: {'id': task-158200, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1165.073519] env[61649]: DEBUG oslo_vmware.api [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Task: {'id': task-158200, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.095536} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1165.073761] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Deleted the datastore file {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1165.073960] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Deleted contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}}
[ 1165.074136] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1165.074310] env[61649]: INFO nova.compute.manager [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Took 4.48 seconds to destroy the instance on the hypervisor.
[ 1165.076900] env[61649]: DEBUG nova.compute.claims [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Aborting claim: {{(pid=61649) abort /opt/stack/nova/nova/compute/claims.py:84}}
[ 1165.077111] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1165.077397] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1165.079207] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Expecting reply to msg c5d0169d37ec43079fb5ecf9c8cee65c in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1165.111551] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c5d0169d37ec43079fb5ecf9c8cee65c
[ 1165.189016] env[61649]: DEBUG nova.scheduler.client.report [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Refreshing inventories for resource provider dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}}
[ 1165.202038] env[61649]: DEBUG nova.scheduler.client.report [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Updating ProviderTree inventory for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}}
[ 1165.202258] env[61649]: DEBUG nova.compute.provider_tree [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Updating inventory in ProviderTree for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}}
[ 1165.213024] env[61649]: DEBUG nova.scheduler.client.report [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Refreshing aggregate associations for resource provider dad32f24-3843-462d-a3f9-4ef2a60037c4, aggregates: None {{(pid=61649) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}}
[ 1165.229645] env[61649]: DEBUG nova.scheduler.client.report [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Refreshing trait associations for resource provider dad32f24-3843-462d-a3f9-4ef2a60037c4, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=61649) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}}
[ 1165.471619] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a33005c7-5982-4976-88a7-5649585e2f56 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1165.479083] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37148d35-d4b4-402a-a5e5-fd180e587349 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1165.508128] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f70801b3-674d-445e-b50f-2d48fa5f3f6e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1165.514983] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad73472b-7734-4d73-bcb5-d71cbe7b8814 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1165.527657] env[61649]: DEBUG nova.compute.provider_tree [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1165.528355] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Expecting reply to msg 4cda5367fa1446deb759365e0b5e06f4 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1165.536381] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4cda5367fa1446deb759365e0b5e06f4
[ 1165.537515] env[61649]: DEBUG nova.scheduler.client.report [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1165.540239] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Expecting reply to msg 2d50d52ac1f644548a4e4d5683c6a30c in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1165.552288] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2d50d52ac1f644548a4e4d5683c6a30c
[ 1165.552996] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.476s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1165.553525] env[61649]: ERROR nova.compute.manager [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1165.553525] env[61649]: Faults: ['InvalidArgument']
[ 1165.553525] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Traceback (most recent call last):
[ 1165.553525] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1165.553525] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] self.driver.spawn(context, instance, image_meta,
[ 1165.553525] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1165.553525] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1165.553525] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1165.553525] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] self._fetch_image_if_missing(context, vi)
[ 1165.553525] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1165.553525] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] image_cache(vi, tmp_image_ds_loc)
[ 1165.553525] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1165.553855] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] vm_util.copy_virtual_disk(
[ 1165.553855] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1165.553855] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] session._wait_for_task(vmdk_copy_task)
[ 1165.553855] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1165.553855] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] return self.wait_for_task(task_ref)
[ 1165.553855] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1165.553855] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] return evt.wait()
[ 1165.553855] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1165.553855] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] result = hub.switch()
[ 1165.553855] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1165.553855] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] return self.greenlet.switch()
[ 1165.553855] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1165.553855] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] self.f(*self.args, **self.kw)
[ 1165.554205] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1165.554205] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] raise exceptions.translate_fault(task_info.error)
[ 1165.554205] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1165.554205] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Faults: ['InvalidArgument']
[ 1165.554205] env[61649]: ERROR nova.compute.manager [instance: bf8c692f-6510-4548-aedd-0e1792512e20]
[ 1165.554205] env[61649]: DEBUG nova.compute.utils [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] VimFaultException {{(pid=61649) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1165.555755] env[61649]: DEBUG nova.compute.manager [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Build of instance bf8c692f-6510-4548-aedd-0e1792512e20 was re-scheduled: A specified parameter was not correct: fileType
[ 1165.555755] env[61649]: Faults: ['InvalidArgument'] {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 1165.556145] env[61649]: DEBUG nova.compute.manager [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Unplugging VIFs for instance {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 1165.556395] env[61649]: DEBUG nova.compute.manager [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}}
[ 1165.556486] env[61649]: DEBUG nova.compute.manager [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 1165.556649] env[61649]: DEBUG nova.network.neutron [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}}
[ 1165.909838] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Expecting reply to msg 888b71eac2b84bbdb26ba96b2e93484e in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1165.917771] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 888b71eac2b84bbdb26ba96b2e93484e
[ 1165.918180] env[61649]: DEBUG nova.network.neutron [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1165.918653] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Expecting reply to msg 9f109c02145140f0856c93a7ba511895 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1165.927519] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9f109c02145140f0856c93a7ba511895
[ 1165.928123] env[61649]: INFO nova.compute.manager [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Took 0.37 seconds to deallocate network for instance.
[ 1165.929846] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Expecting reply to msg aeee809c655645b78e66d4aede7454ad in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1165.962300] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aeee809c655645b78e66d4aede7454ad [ 1165.965192] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Expecting reply to msg af2be18eef78472fad758932eaebf337 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1165.995481] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg af2be18eef78472fad758932eaebf337 [ 1166.019902] env[61649]: INFO nova.scheduler.client.report [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Deleted allocations for instance bf8c692f-6510-4548-aedd-0e1792512e20 [ 1166.026233] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Expecting reply to msg 661f807cd91148d8b14a2b255b4ad773 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1166.039599] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 661f807cd91148d8b14a2b255b4ad773 [ 1166.040149] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8d7dca7d-89a7-4ee7-ac57-31f24b654f55 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Lock "bf8c692f-6510-4548-aedd-0e1792512e20" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 580.746s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1166.040729] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Expecting reply to msg a929ed4a26f346dc98ed7ea4d9bff23a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1166.041452] env[61649]: DEBUG oslo_concurrency.lockutils [None req-77ddaa6d-7a39-4fb5-8274-0f0e29156f39 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Lock "bf8c692f-6510-4548-aedd-0e1792512e20" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 382.262s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1166.041691] env[61649]: DEBUG oslo_concurrency.lockutils [None req-77ddaa6d-7a39-4fb5-8274-0f0e29156f39 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Acquiring lock "bf8c692f-6510-4548-aedd-0e1792512e20-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1166.041891] env[61649]: DEBUG oslo_concurrency.lockutils [None req-77ddaa6d-7a39-4fb5-8274-0f0e29156f39 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] 
Lock "bf8c692f-6510-4548-aedd-0e1792512e20-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1166.042054] env[61649]: DEBUG oslo_concurrency.lockutils [None req-77ddaa6d-7a39-4fb5-8274-0f0e29156f39 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Lock "bf8c692f-6510-4548-aedd-0e1792512e20-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1166.043964] env[61649]: INFO nova.compute.manager [None req-77ddaa6d-7a39-4fb5-8274-0f0e29156f39 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Terminating instance [ 1166.045587] env[61649]: DEBUG nova.compute.manager [None req-77ddaa6d-7a39-4fb5-8274-0f0e29156f39 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1166.045781] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-77ddaa6d-7a39-4fb5-8274-0f0e29156f39 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1166.046241] env[61649]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3f5439da-5631-4606-9391-6663dc6aa9db {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.055144] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92cf8679-3019-4dee-b618-67f40a0f0613 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.066692] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a929ed4a26f346dc98ed7ea4d9bff23a [ 1166.067180] env[61649]: DEBUG nova.compute.manager [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1166.068824] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Expecting reply to msg bf9d5e051b8e493db50eaed7452640bc in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1166.087923] env[61649]: WARNING nova.virt.vmwareapi.vmops [None req-77ddaa6d-7a39-4fb5-8274-0f0e29156f39 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance bf8c692f-6510-4548-aedd-0e1792512e20 could not be found. 
[ 1166.088125] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-77ddaa6d-7a39-4fb5-8274-0f0e29156f39 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1166.088313] env[61649]: INFO nova.compute.manager [None req-77ddaa6d-7a39-4fb5-8274-0f0e29156f39 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1166.088555] env[61649]: DEBUG oslo.service.loopingcall [None req-77ddaa6d-7a39-4fb5-8274-0f0e29156f39 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1166.088771] env[61649]: DEBUG nova.compute.manager [-] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1166.088863] env[61649]: DEBUG nova.network.neutron [-] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1166.097670] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bf9d5e051b8e493db50eaed7452640bc [ 1166.107418] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg a2d4516f90724bdc8e30460814d72332 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1166.111742] env[61649]: DEBUG oslo_concurrency.lockutils [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1166.111999] env[61649]: DEBUG oslo_concurrency.lockutils [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1166.113398] env[61649]: INFO nova.compute.claims [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1166.114925] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Expecting reply to msg 71813c920fcb4448ae25869573cb5e3d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1166.115860] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a2d4516f90724bdc8e30460814d72332 [ 1166.116181] env[61649]: DEBUG 
nova.network.neutron [-] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1166.116492] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg cefdb52775d34dadb07b547a8d602564 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1166.123455] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cefdb52775d34dadb07b547a8d602564 [ 1166.123850] env[61649]: INFO nova.compute.manager [-] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] Took 0.03 seconds to deallocate network for instance. [ 1166.126966] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-77ddaa6d-7a39-4fb5-8274-0f0e29156f39 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Expecting reply to msg 742d8c8cbc3a4b979a52296eecb3f1d3 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1166.158647] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 742d8c8cbc3a4b979a52296eecb3f1d3 [ 1166.165472] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 71813c920fcb4448ae25869573cb5e3d [ 1166.166961] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Expecting reply to msg 2b1ebbb3a0c14816b0ca2ca84b67b69e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1166.171430] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-77ddaa6d-7a39-4fb5-8274-0f0e29156f39 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Expecting reply to msg 2ff8032377b64d12ba4bdf5496bcad0d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1166.176616] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2b1ebbb3a0c14816b0ca2ca84b67b69e [ 1166.210473] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2ff8032377b64d12ba4bdf5496bcad0d [ 1166.211022] env[61649]: DEBUG oslo_concurrency.lockutils [None req-77ddaa6d-7a39-4fb5-8274-0f0e29156f39 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Lock "bf8c692f-6510-4548-aedd-0e1792512e20" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.170s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1166.211411] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-77ddaa6d-7a39-4fb5-8274-0f0e29156f39 tempest-ServersAdminTestJSON-1522131393 tempest-ServersAdminTestJSON-1522131393-project-member] Expecting reply to msg 29ce1a6728394f63bde7196ff2ad07ce in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1166.212153] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "bf8c692f-6510-4548-aedd-0e1792512e20" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 38.185s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1166.212655] env[61649]: INFO nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: bf8c692f-6510-4548-aedd-0e1792512e20] During sync_power_state the 
instance has a pending task (deleting). Skip. [ 1166.212910] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "bf8c692f-6510-4548-aedd-0e1792512e20" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1166.221601] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 29ce1a6728394f63bde7196ff2ad07ce [ 1166.419073] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da1bf31e-41be-426b-a966-a1c86e7e337d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.426085] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e7a8638-b6c6-4577-91ea-9ce948b43d30 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.128433] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0a15604-9ff9-4af6-8841-5829a4fc55e0 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.132487] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8b52a979-a47d-4c51-bc4b-18d81f2a9f6a tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 038940982359484ead3cf5c97683ec22 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1167.138994] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b40a504a-c753-46c9-8461-915ce2613ffa {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.142846] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 038940982359484ead3cf5c97683ec22 [ 1167.143627] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8b52a979-a47d-4c51-bc4b-18d81f2a9f6a tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquiring lock "5dc4bde6-db61-47c2-a2b8-d2a5515b1525" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1167.153380] env[61649]: DEBUG nova.compute.provider_tree [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1167.153822] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Expecting reply to msg 8e9a4d9dcc9e4cb6a14d96c1de376586 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1167.160603] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8e9a4d9dcc9e4cb6a14d96c1de376586 [ 1167.161434] env[61649]: DEBUG nova.scheduler.client.report [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 
tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1167.163648] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Expecting reply to msg 0defa7a049314795acd423a2b2d645ef in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1167.175916] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0defa7a049314795acd423a2b2d645ef [ 1167.176612] env[61649]: DEBUG oslo_concurrency.lockutils [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.065s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1167.177080] env[61649]: DEBUG nova.compute.manager [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Start building networks asynchronously for instance. {{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1167.178669] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Expecting reply to msg a793e2d314a24aeb928ce40fe34a28ee in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1167.207394] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a793e2d314a24aeb928ce40fe34a28ee [ 1167.208734] env[61649]: DEBUG nova.compute.utils [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Using /dev/sd instead of None {{(pid=61649) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1167.209408] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Expecting reply to msg 3c52930d4e5d40a1b754799ab18339a6 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1167.210357] env[61649]: DEBUG nova.compute.manager [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Allocating IP information in the background. 
{{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1167.210519] env[61649]: DEBUG nova.network.neutron [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] allocate_for_instance() {{(pid=61649) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1167.218221] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3c52930d4e5d40a1b754799ab18339a6 [ 1167.218731] env[61649]: DEBUG nova.compute.manager [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Start building block device mappings for instance. {{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1167.220319] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Expecting reply to msg 103480e04ee740159851dd57a7a47273 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1167.251230] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 103480e04ee740159851dd57a7a47273 [ 1167.253751] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Expecting reply to msg d6928aa463d348e3b1b4a6e11c20c7d0 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1167.256059] env[61649]: DEBUG nova.policy [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b43cfdcc02c04da585f709b8bdf1263e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8df702e7627e455e9cd7ea3688bd26af', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61649) authorize /opt/stack/nova/nova/policy.py:203}} [ 1167.282612] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d6928aa463d348e3b1b4a6e11c20c7d0 [ 1167.283713] env[61649]: DEBUG nova.compute.manager [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Start spawning the instance on the hypervisor. 
{{(pid=61649) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1167.304915] env[61649]: DEBUG nova.virt.hardware [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-04-02T10:03:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-04-02T10:03:42Z,direct_url=,disk_format='vmdk',id=d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d9c1bd4c77004c3cb8e42232cad1896c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-04-02T10:03:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1167.305159] env[61649]: DEBUG nova.virt.hardware [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Flavor limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1167.305318] env[61649]: DEBUG nova.virt.hardware [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Image limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1167.305500] env[61649]: DEBUG nova.virt.hardware [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Flavor pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1167.305647] env[61649]: DEBUG nova.virt.hardware [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Image pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1167.305792] env[61649]: DEBUG nova.virt.hardware [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1167.305999] env[61649]: DEBUG nova.virt.hardware [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1167.306161] env[61649]: DEBUG nova.virt.hardware [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1167.306326] env[61649]: DEBUG nova.virt.hardware [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Got 1 possible topologies {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1167.306487] env[61649]: DEBUG nova.virt.hardware [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1167.306656] env[61649]: DEBUG nova.virt.hardware [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1167.307614] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2004decd-464b-43d6-b3f3-c38d864aa21f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.315063] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bdc4dd5-f822-4211-8d60-8d6043b52720 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.720069] env[61649]: DEBUG nova.network.neutron [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Successfully created port: 7afd9abc-cd6c-473e-a8d5-5d185ee0ec78 {{(pid=61649) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1168.304302] env[61649]: DEBUG nova.compute.manager [req-79ed0d08-5ed1-44d0-8fcb-e75dcd554476 req-bdd234f4-704e-4b4d-9207-3c4f1a234694 service nova] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Received event network-vif-plugged-7afd9abc-cd6c-473e-a8d5-5d185ee0ec78 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1168.304550] env[61649]: DEBUG oslo_concurrency.lockutils [req-79ed0d08-5ed1-44d0-8fcb-e75dcd554476 req-bdd234f4-704e-4b4d-9207-3c4f1a234694 service nova] Acquiring lock "5730229a-fd0c-4df1-9059-cd6ed39e954c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1168.304731] env[61649]: DEBUG oslo_concurrency.lockutils [req-79ed0d08-5ed1-44d0-8fcb-e75dcd554476 req-bdd234f4-704e-4b4d-9207-3c4f1a234694 service nova] Lock "5730229a-fd0c-4df1-9059-cd6ed39e954c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1168.304897] env[61649]: DEBUG oslo_concurrency.lockutils [req-79ed0d08-5ed1-44d0-8fcb-e75dcd554476 req-bdd234f4-704e-4b4d-9207-3c4f1a234694 service nova] Lock "5730229a-fd0c-4df1-9059-cd6ed39e954c-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1168.305059] env[61649]: DEBUG nova.compute.manager [req-79ed0d08-5ed1-44d0-8fcb-e75dcd554476 req-bdd234f4-704e-4b4d-9207-3c4f1a234694 service nova] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] No waiting events found dispatching network-vif-plugged-7afd9abc-cd6c-473e-a8d5-5d185ee0ec78 {{(pid=61649) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1168.305218] env[61649]: WARNING nova.compute.manager [req-79ed0d08-5ed1-44d0-8fcb-e75dcd554476 req-bdd234f4-704e-4b4d-9207-3c4f1a234694 service nova] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Received unexpected event network-vif-plugged-7afd9abc-cd6c-473e-a8d5-5d185ee0ec78 for instance with vm_state building and task_state spawning. [ 1168.381727] env[61649]: DEBUG nova.network.neutron [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Successfully updated port: 7afd9abc-cd6c-473e-a8d5-5d185ee0ec78 {{(pid=61649) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1168.382281] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Expecting reply to msg f4a5c542bf304bd1860d3d1eead3e8f7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1168.390647] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f4a5c542bf304bd1860d3d1eead3e8f7 [ 1168.391371] env[61649]: DEBUG oslo_concurrency.lockutils [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Acquiring lock "refresh_cache-5730229a-fd0c-4df1-9059-cd6ed39e954c" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1168.391593] env[61649]: DEBUG oslo_concurrency.lockutils [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Acquired lock "refresh_cache-5730229a-fd0c-4df1-9059-cd6ed39e954c" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1168.391829] env[61649]: DEBUG nova.network.neutron [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Building network info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1168.392392] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Expecting reply to msg de023535840045168ba4da0fd87e1e24 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1168.399226] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg de023535840045168ba4da0fd87e1e24 [ 1168.430969] env[61649]: DEBUG nova.network.neutron [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 
tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Instance cache missing network info. {{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1168.566101] env[61649]: DEBUG nova.network.neutron [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Updating instance_info_cache with network_info: [{"id": "7afd9abc-cd6c-473e-a8d5-5d185ee0ec78", "address": "fa:16:3e:e4:20:7c", "network": {"id": "3da5cb0f-8538-4698-aa01-b09b01870924", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-183831633-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8df702e7627e455e9cd7ea3688bd26af", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35fcdc55-dc29-451b-ad56-3a03b044dc81", "external-id": "nsx-vlan-transportzone-552", "segmentation_id": 552, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7afd9abc-cd", "ovs_interfaceid": "7afd9abc-cd6c-473e-a8d5-5d185ee0ec78", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1168.566708] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Expecting reply to msg 13dc357e699a463c9706f217d07111e7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1168.577673] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 13dc357e699a463c9706f217d07111e7 [ 1168.577673] env[61649]: DEBUG oslo_concurrency.lockutils [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Releasing lock "refresh_cache-5730229a-fd0c-4df1-9059-cd6ed39e954c" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1168.577888] env[61649]: DEBUG nova.compute.manager [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Instance network_info: |[{"id": "7afd9abc-cd6c-473e-a8d5-5d185ee0ec78", "address": "fa:16:3e:e4:20:7c", "network": {"id": "3da5cb0f-8538-4698-aa01-b09b01870924", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-183831633-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": 
{"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8df702e7627e455e9cd7ea3688bd26af", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35fcdc55-dc29-451b-ad56-3a03b044dc81", "external-id": "nsx-vlan-transportzone-552", "segmentation_id": 552, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7afd9abc-cd", "ovs_interfaceid": "7afd9abc-cd6c-473e-a8d5-5d185ee0ec78", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1168.577982] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e4:20:7c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '35fcdc55-dc29-451b-ad56-3a03b044dc81', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7afd9abc-cd6c-473e-a8d5-5d185ee0ec78', 'vif_model': 'vmxnet3'}] {{(pid=61649) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1168.585110] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Creating folder: Project (8df702e7627e455e9cd7ea3688bd26af). Parent ref: group-v51588. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1168.585926] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a5ffc82d-7461-489d-8bc1-69612bf77ee4 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.596805] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Created folder: Project (8df702e7627e455e9cd7ea3688bd26af) in parent group-v51588. [ 1168.596987] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Creating folder: Instances. Parent ref: group-v51658. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1168.597795] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b5500f6d-d3af-4101-8800-17b4bf8a5e7c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.605361] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Created folder: Instances in parent group-v51658. 
[ 1168.605577] env[61649]: DEBUG oslo.service.loopingcall [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1168.605751] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Creating VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 1168.605935] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-85e8d6a6-9dea-4185-aad6-c85a973341c5 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1168.624601] env[61649]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 1168.624601] env[61649]: value = "task-158203"
[ 1168.624601] env[61649]: _type = "Task"
[ 1168.624601] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1168.631967] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158203, 'name': CreateVM_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1169.134061] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158203, 'name': CreateVM_Task, 'duration_secs': 0.289883} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1169.134219] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Created VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 1169.134882] env[61649]: DEBUG oslo_concurrency.lockutils [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1169.135039] env[61649]: DEBUG oslo_concurrency.lockutils [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1169.135358] env[61649]: DEBUG oslo_concurrency.lockutils [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 1169.135595] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a7732e3-02f8-474e-922b-f81ea558f435 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1169.140130] env[61649]: DEBUG oslo_vmware.api [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Waiting for the task: (returnval){
[ 1169.140130] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52dec787-2512-1749-9782-f7455a25f987"
[ 1169.140130] env[61649]: _type = "Task"
[ 1169.140130] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1169.154720] env[61649]: DEBUG oslo_concurrency.lockutils [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1169.154952] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Processing image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 1169.155282] env[61649]: DEBUG oslo_concurrency.lockutils [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1170.357322] env[61649]: DEBUG nova.compute.manager [req-c08257d1-d0ee-4547-ae53-410ba3c30d83 req-543fe878-0c49-40b2-b98f-acb61ea13913 service nova] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Received event network-changed-7afd9abc-cd6c-473e-a8d5-5d185ee0ec78 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}}
[ 1170.357578] env[61649]: DEBUG nova.compute.manager [req-c08257d1-d0ee-4547-ae53-410ba3c30d83 req-543fe878-0c49-40b2-b98f-acb61ea13913 service nova] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Refreshing instance network info cache due to event network-changed-7afd9abc-cd6c-473e-a8d5-5d185ee0ec78.
{{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 1170.357779] env[61649]: DEBUG oslo_concurrency.lockutils [req-c08257d1-d0ee-4547-ae53-410ba3c30d83 req-543fe878-0c49-40b2-b98f-acb61ea13913 service nova] Acquiring lock "refresh_cache-5730229a-fd0c-4df1-9059-cd6ed39e954c" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1170.357921] env[61649]: DEBUG oslo_concurrency.lockutils [req-c08257d1-d0ee-4547-ae53-410ba3c30d83 req-543fe878-0c49-40b2-b98f-acb61ea13913 service nova] Acquired lock "refresh_cache-5730229a-fd0c-4df1-9059-cd6ed39e954c" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1170.358078] env[61649]: DEBUG nova.network.neutron [req-c08257d1-d0ee-4547-ae53-410ba3c30d83 req-543fe878-0c49-40b2-b98f-acb61ea13913 service nova] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Refreshing network info cache for port 7afd9abc-cd6c-473e-a8d5-5d185ee0ec78 {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 1170.358573] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-c08257d1-d0ee-4547-ae53-410ba3c30d83 req-543fe878-0c49-40b2-b98f-acb61ea13913 service nova] Expecting reply to msg a01285eb85de49d687a983ee3091d43c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1170.365996] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a01285eb85de49d687a983ee3091d43c [ 1170.594188] env[61649]: DEBUG nova.network.neutron [req-c08257d1-d0ee-4547-ae53-410ba3c30d83 req-543fe878-0c49-40b2-b98f-acb61ea13913 service nova] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Updated VIF entry in instance network info cache for port 7afd9abc-cd6c-473e-a8d5-5d185ee0ec78. 
{{(pid=61649) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 1170.594545] env[61649]: DEBUG nova.network.neutron [req-c08257d1-d0ee-4547-ae53-410ba3c30d83 req-543fe878-0c49-40b2-b98f-acb61ea13913 service nova] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Updating instance_info_cache with network_info: [{"id": "7afd9abc-cd6c-473e-a8d5-5d185ee0ec78", "address": "fa:16:3e:e4:20:7c", "network": {"id": "3da5cb0f-8538-4698-aa01-b09b01870924", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-183831633-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8df702e7627e455e9cd7ea3688bd26af", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35fcdc55-dc29-451b-ad56-3a03b044dc81", "external-id": "nsx-vlan-transportzone-552", "segmentation_id": 552, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7afd9abc-cd", "ovs_interfaceid": "7afd9abc-cd6c-473e-a8d5-5d185ee0ec78", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1170.595060] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-c08257d1-d0ee-4547-ae53-410ba3c30d83 req-543fe878-0c49-40b2-b98f-acb61ea13913 service nova] Expecting reply to msg b73d44b4cea74156b8940e4682720dc6 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1170.604696] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b73d44b4cea74156b8940e4682720dc6 [ 1170.605313] env[61649]: DEBUG oslo_concurrency.lockutils [req-c08257d1-d0ee-4547-ae53-410ba3c30d83 req-543fe878-0c49-40b2-b98f-acb61ea13913 service nova] Releasing lock "refresh_cache-5730229a-fd0c-4df1-9059-cd6ed39e954c" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1177.709627] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-eb3d288b-4e53-4f52-ae93-d8365579e6cd tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Expecting reply to msg 069f642a4a354e019e68df9eb183e477 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1177.718286] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 069f642a4a354e019e68df9eb183e477 [ 1177.718766] env[61649]: DEBUG oslo_concurrency.lockutils [None req-eb3d288b-4e53-4f52-ae93-d8365579e6cd tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Acquiring lock "5730229a-fd0c-4df1-9059-cd6ed39e954c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1184.990568] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Acquiring lock "aa39503a-2342-421e-928f-35ec7c8e47fb" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1184.990864] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Lock "aa39503a-2342-421e-928f-35ec7c8e47fb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1188.550940] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1189.929685] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1190.928515] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1190.928694] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61649) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 1191.924640] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1191.928277] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1191.928425] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Starting heal instance info cache {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 1191.928539] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Rebuilding the list of instances to heal {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 1191.929113] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 448ded14bb964afeaa2659289f11c2e0 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1191.945899] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 448ded14bb964afeaa2659289f11c2e0 [ 1191.948040] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Skipping network cache update for instance because it is Building. 
{{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1191.948240] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1191.948326] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1191.948454] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1191.948578] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1191.948700] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1191.948820] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1191.948936] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1191.949051] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1191.949165] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1191.949294] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Didn't find any instances for network info cache update. 
{{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 1191.949782] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1192.929900] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1192.930182] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1193.929722] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1193.930081] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg d50a313fdb3c4e3db9f0592fcd1c61a2 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1193.939382] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d50a313fdb3c4e3db9f0592fcd1c61a2 [ 1193.940347] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1193.940557] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1193.940716] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1193.940867] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61649) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1193.941916] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e978fe0-aed2-41c3-9a9d-4d4638eab146 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.950688] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0cf2902-f3c8-4f3d-ba8d-48e3db58e35f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1193.966276] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af9c5705-8e99-4bb7-adba-87eb83006f1c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.972363] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06104a34-a975-4407-939d-26a84ec402f5 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.000988] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181796MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61649) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1194.001221] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1194.001472] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1194.002332] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg b5b4edb4d5fd436e89f78761ed44c9f4 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1194.037408] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b5b4edb4d5fd436e89f78761ed44c9f4 [ 1194.042484] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 463afe7a65ac4d209fcf218f3f3d6d5c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1194.051858] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 463afe7a65ac4d209fcf218f3f3d6d5c [ 1194.069886] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance e5fe92cf-e150-419f-a164-a98a9d24dd8c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1194.070036] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 95426048-d403-4dad-9ad7-b76de655a319 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1194.070184] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance a0db1e96-4ca4-4fed-b86b-d8457f3570a9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1194.070295] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 29f84900-0805-4ab2-af4d-bd7be2ac94d3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1194.070421] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance bf2399eb-b2df-43b3-bddd-48692825c40a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1194.070533] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 6ab197e9-3e38-4b37-b625-c30b6977261a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1194.070646] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance c9fe1bfe-e813-43e9-9668-b813416ee27b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1194.070757] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 9cdd96c2-2837-4cb3-855c-ecad727dd5d4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1194.070866] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 5dc4bde6-db61-47c2-a2b8-d2a5515b1525 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1194.071505] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 5730229a-fd0c-4df1-9059-cd6ed39e954c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1194.071505] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 7a28f8a9026440a9bdef9fe82232bf01 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1194.081100] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7a28f8a9026440a9bdef9fe82232bf01 [ 1194.081824] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 82ae439e-5fe0-4bed-b550-e34929c139f8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1194.082304] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 0f1db7d5773a427fae6d2b82d227026b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1194.093565] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0f1db7d5773a427fae6d2b82d227026b [ 1194.094280] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 40b216ba-3afd-4cfe-b98b-c5de03501317 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1194.094741] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 710159fa5b1f49aebab6a6433d7be57c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1194.103456] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 710159fa5b1f49aebab6a6433d7be57c [ 1194.104750] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance a12bff02-f7da-43a1-b614-beb3d6908e0b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1194.104750] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 5b5871574e0c41c2a28728d6964dc33d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1194.114813] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5b5871574e0c41c2a28728d6964dc33d [ 1194.115453] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 2b4cc40e-a6fc-48df-baaf-f74352c24408 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1194.115886] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 6056b56125a64438ad9fc84d7bc3f210 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1194.124843] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6056b56125a64438ad9fc84d7bc3f210 [ 1194.125479] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 0b0050ff-2714-4068-9956-089c6aa3eff1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1194.125924] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 101401b65250451db600efd86ebce746 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1194.135697] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 101401b65250451db600efd86ebce746 [ 1194.136597] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 71c15bd8-5786-4d44-aa0e-3249b272ac72 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1194.137016] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg c491990449214d708757e5702db683c8 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1194.146476] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c491990449214d708757e5702db683c8 [ 1194.147082] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance d7adbc9f-af82-4f99-8536-4411665e3233 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1194.147552] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 22e233eb44594f67b90d8e2bf831cb98 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1194.158144] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 22e233eb44594f67b90d8e2bf831cb98 [ 1194.158144] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance c7a35269-b314-4381-a8b4-d509d5627861 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1194.158594] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 4dcbec7340724d19aa443685a0102ea3 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1194.167703] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4dcbec7340724d19aa443685a0102ea3 [ 1194.168342] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 0fb0aaae-b6d2-418d-81a9-74671f4b97c6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1194.168779] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 30be74fde7074b5a9b39b3bc42f9baea in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1194.177185] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 30be74fde7074b5a9b39b3bc42f9baea [ 1194.177756] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 5f67180f-6b27-4487-8858-5f57fcffd041 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1194.178160] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 04ad66c148124da28d2bceb16034dcc5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1194.186752] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 04ad66c148124da28d2bceb16034dcc5 [ 1194.187308] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance aa39503a-2342-421e-928f-35ec7c8e47fb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1194.187526] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1194.187669] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1194.473166] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b41bd213-1aac-4183-8306-12b8118aa1d3 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.480401] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fc9a771-acee-42b7-95d7-546b2665e46d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.508972] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d7d9208-c375-4f6e-a6ef-f61bbc0f10f7 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.515405] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad351bad-6915-41f4-87ae-256f5750bd84 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.528376] env[61649]: DEBUG nova.compute.provider_tree [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1194.528824] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg f29d3790b0844be99fe98fa6b0c3c340 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1194.536056] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f29d3790b0844be99fe98fa6b0c3c340 [ 1194.536894] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1194.539131] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 3d7b24928d6746588ca2dc48961a2ac8 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1194.552276] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3d7b24928d6746588ca2dc48961a2ac8 
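A consistency check on the audit above: the provider inventory reports 512 MB of reserved host memory, and each of the ten instances reported as "actively managed on this compute host" holds an identical placement allocation of {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}. The resource tracker folds the host reservation into usage, so the "Final resource view" figures follow directly: used_ram = 512 + 10 × 128 = 1792 MB, used_disk = 0 + 10 × 1 = 10 GB, used_vcpus = 0 + 10 × 1 = 10. Below is a minimal sketch of that aggregation, assuming the same data shape; the names are illustrative, not Nova's actual code (the real accounting lives in nova.compute.resource_tracker.ResourceTracker):

    # Recompute the "Final resource view" figures from the allocations
    # logged above. RESERVED mirrors the 'reserved' fields of the provider
    # inventory; 'allocations' is the per-instance placement allocation
    # held by each of the ten actively managed instances.
    RESERVED = {'MEMORY_MB': 512, 'DISK_GB': 0, 'VCPU': 0}
    allocations = [{'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}] * 10

    def used(resource):
        # host reservation + sum of instance allocations for one resource class
        return RESERVED[resource] + sum(a.get(resource, 0) for a in allocations)

    assert used('MEMORY_MB') == 1792  # matches used_ram=1792MB
    assert used('DISK_GB') == 10      # matches used_disk=10GB
    assert used('VCPU') == 10         # matches used_vcpus=10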
[ 1194.552929] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61649) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1194.553099] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.552s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1209.737308] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Acquiring lock "d6e8f17f-40c4-46e0-a900-d92d1da01ed8" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1209.737753] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Lock "d6e8f17f-40c4-46e0-a900-d92d1da01ed8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1210.033752] env[61649]: WARNING oslo_vmware.rw_handles [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1210.033752] env[61649]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1210.033752] env[61649]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1210.033752] env[61649]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1210.033752] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1210.033752] env[61649]: ERROR oslo_vmware.rw_handles response.begin() [ 1210.033752] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1210.033752] env[61649]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1210.033752] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1210.033752] env[61649]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1210.033752] env[61649]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1210.033752] env[61649]: ERROR oslo_vmware.rw_handles [ 1210.034701] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Downloaded image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to 
vmware_temp/19eccc88-4aff-4138-b06b-094b93b631a7/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1210.036472] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Caching image {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1210.036727] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Copying Virtual Disk [datastore1] vmware_temp/19eccc88-4aff-4138-b06b-094b93b631a7/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk to [datastore1] vmware_temp/19eccc88-4aff-4138-b06b-094b93b631a7/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk {{(pid=61649) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1210.037018] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bfcc49c1-80b5-4659-907e-fdfc12cb57c2 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.044897] env[61649]: DEBUG oslo_vmware.api [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Waiting for the task: (returnval){ [ 1210.044897] env[61649]: value = "task-158204" [ 1210.044897] env[61649]: _type = "Task" [ 1210.044897] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.054027] env[61649]: DEBUG oslo_vmware.api [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Task: {'id': task-158204, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.555740] env[61649]: DEBUG oslo_vmware.exceptions [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Fault InvalidArgument not matched. 
{{(pid=61649) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1210.557240] env[61649]: DEBUG oslo_concurrency.lockutils [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1210.557858] env[61649]: ERROR nova.compute.manager [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1210.557858] env[61649]: Faults: ['InvalidArgument'] [ 1210.557858] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Traceback (most recent call last): [ 1210.557858] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1210.557858] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] yield resources [ 1210.557858] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1210.557858] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] self.driver.spawn(context, instance, image_meta, [ 1210.557858] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1210.557858] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1210.557858] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1210.557858] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] self._fetch_image_if_missing(context, vi) [ 1210.557858] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1210.558373] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] image_cache(vi, tmp_image_ds_loc) [ 1210.558373] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1210.558373] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] vm_util.copy_virtual_disk( [ 1210.558373] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1210.558373] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] session._wait_for_task(vmdk_copy_task) [ 1210.558373] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1210.558373] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] return self.wait_for_task(task_ref) [ 1210.558373] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1210.558373] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] return evt.wait() [ 1210.558373] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1210.558373] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] result = hub.switch() [ 1210.558373] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1210.558373] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] return self.greenlet.switch() [ 1210.558823] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1210.558823] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] self.f(*self.args, **self.kw) [ 1210.558823] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1210.558823] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] raise exceptions.translate_fault(task_info.error) [ 1210.558823] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1210.558823] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Faults: ['InvalidArgument'] [ 1210.558823] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] [ 1210.558823] env[61649]: INFO nova.compute.manager [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Terminating instance [ 1210.559921] env[61649]: DEBUG oslo_concurrency.lockutils [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1210.560078] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1210.560701] env[61649]: DEBUG nova.compute.manager [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c 
tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1210.560878] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1210.561100] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e1ec62bb-d9dc-4235-a403-7596584390fa {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.564313] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beabb179-6fcd-47f8-8092-b608a5e83431 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.571677] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Unregistering the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1210.571899] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c06c7d05-3aa5-4f39-8fa3-8bec0d95f21d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.574220] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1210.574403] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61649) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1210.575382] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8110bf17-0876-4953-9e70-0f8292eccdf8 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.580171] env[61649]: DEBUG oslo_vmware.api [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Waiting for the task: (returnval){ [ 1210.580171] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52981923-21cf-0afc-0618-ab352e7caab1" [ 1210.580171] env[61649]: _type = "Task" [ 1210.580171] env[61649]: } to complete. 
{{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.587862] env[61649]: DEBUG oslo_vmware.api [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52981923-21cf-0afc-0618-ab352e7caab1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.636492] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Unregistered the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1210.636694] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Deleting contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1210.636873] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Deleting the datastore file [datastore1] e5fe92cf-e150-419f-a164-a98a9d24dd8c {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1210.637129] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-69a67fe3-89f9-46d0-b578-52a7675c6919 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.648374] env[61649]: DEBUG oslo_vmware.api [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Waiting for the task: (returnval){ [ 1210.648374] env[61649]: value = "task-158206" [ 1210.648374] env[61649]: _type = "Task" [ 1210.648374] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.656299] env[61649]: DEBUG oslo_vmware.api [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Task: {'id': task-158206, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.091727] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Preparing fetch location {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1211.092524] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Creating directory with path [datastore1] vmware_temp/2b9651ad-3676-45bd-b92d-36fdfbb0b58e/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1211.093573] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e766cfaa-55ff-4f99-8c66-25323d11c4d7 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.137478] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Created directory with path [datastore1] vmware_temp/2b9651ad-3676-45bd-b92d-36fdfbb0b58e/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1211.137478] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Fetch image to [datastore1] vmware_temp/2b9651ad-3676-45bd-b92d-36fdfbb0b58e/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1211.137478] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to [datastore1] vmware_temp/2b9651ad-3676-45bd-b92d-36fdfbb0b58e/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1211.137478] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b599d52b-48b4-447e-937c-053d78b02d63 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.141196] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7be45343-fb04-4b38-982d-6a180aeb2836 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.153576] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0450752e-118d-4e61-9f3c-29a8246904df {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.163299] env[61649]: DEBUG oslo_vmware.api [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c 
tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Task: {'id': task-158206, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.086015} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1211.187366] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Deleted the datastore file {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1211.187587] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Deleted contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1211.187750] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1211.187923] env[61649]: INFO nova.compute.manager [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Took 0.63 seconds to destroy the instance on the hypervisor. [ 1211.190341] env[61649]: DEBUG nova.compute.claims [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Aborting claim: {{(pid=61649) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1211.190512] env[61649]: DEBUG oslo_concurrency.lockutils [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1211.190740] env[61649]: DEBUG oslo_concurrency.lockutils [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1211.192655] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Expecting reply to msg 9b00ffa01085439abf86351bf8e71cb4 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1211.194038] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-053606e6-02ff-4270-9c8c-5c1b958ec89f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1211.200411] env[61649]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-25d80771-9655-4bcb-9e8f-0930d5e058db {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.227588] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1211.237956] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9b00ffa01085439abf86351bf8e71cb4 [ 1211.441312] env[61649]: DEBUG oslo_vmware.rw_handles [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2b9651ad-3676-45bd-b92d-36fdfbb0b58e/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1211.509191] env[61649]: DEBUG oslo_vmware.rw_handles [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Completed reading data from the image iterator. {{(pid=61649) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1211.509497] env[61649]: DEBUG oslo_vmware.rw_handles [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2b9651ad-3676-45bd-b92d-36fdfbb0b58e/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61649) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1211.593156] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3a1f6c6-51d4-4c4c-bb9d-94659c7e9563 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.600845] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b74f4c7b-97ed-4be6-a391-dcabdb3825aa {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.636316] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c9bf84a-b3bc-4aec-a979-a84985af35f6 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.646676] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6448475a-a7ba-4b4f-a7e4-8598c7391e71 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.660422] env[61649]: DEBUG nova.compute.provider_tree [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1211.660997] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Expecting reply to msg 5e4785b94fcc454cbbe0429d1b8b4297 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1211.671722] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5e4785b94fcc454cbbe0429d1b8b4297 [ 1211.672699] env[61649]: DEBUG nova.scheduler.client.report [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1211.675061] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Expecting reply to msg 6f391ddc657340adbdc7a9bb8e009b51 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1211.689122] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6f391ddc657340adbdc7a9bb8e009b51 [ 1211.690945] env[61649]: DEBUG oslo_concurrency.lockutils [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.499s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1211.690945] env[61649]: ERROR nova.compute.manager [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1211.690945] env[61649]: Faults: ['InvalidArgument'] [ 1211.690945] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Traceback (most recent call last): [ 1211.690945] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1211.690945] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] self.driver.spawn(context, instance, image_meta, [ 1211.690945] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1211.690945] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1211.690945] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1211.690945] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] self._fetch_image_if_missing(context, vi) [ 1211.691275] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1211.691275] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] image_cache(vi, tmp_image_ds_loc) [ 1211.691275] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1211.691275] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] vm_util.copy_virtual_disk( [ 1211.691275] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1211.691275] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] session._wait_for_task(vmdk_copy_task) [ 1211.691275] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1211.691275] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] return self.wait_for_task(task_ref) [ 1211.691275] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1211.691275] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] return evt.wait() [ 1211.691275] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1211.691275] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] result = hub.switch() [ 1211.691275] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1211.691644] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] return self.greenlet.switch() [ 1211.691644] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1211.691644] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] self.f(*self.args, **self.kw) [ 1211.691644] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1211.691644] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] raise exceptions.translate_fault(task_info.error) [ 1211.691644] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1211.691644] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Faults: ['InvalidArgument'] [ 1211.691644] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] [ 1211.691644] env[61649]: DEBUG nova.compute.utils [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] VimFaultException {{(pid=61649) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1211.694024] env[61649]: DEBUG nova.compute.manager [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Build of instance e5fe92cf-e150-419f-a164-a98a9d24dd8c was re-scheduled: A specified parameter was not correct: fileType [ 1211.694024] env[61649]: Faults: ['InvalidArgument'] {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1211.694024] env[61649]: DEBUG nova.compute.manager [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Unplugging VIFs for instance {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1211.694024] env[61649]: DEBUG nova.compute.manager [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1211.694024] env[61649]: DEBUG nova.compute.manager [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1211.694273] env[61649]: DEBUG nova.network.neutron [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1211.854658] env[61649]: DEBUG neutronclient.v2_0.client [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61649) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1211.856763] env[61649]: ERROR nova.compute.manager [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. [ 1211.856763] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Traceback (most recent call last): [ 1211.856763] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1211.856763] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] self.driver.spawn(context, instance, image_meta, [ 1211.856763] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1211.856763] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1211.856763] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1211.856763] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] self._fetch_image_if_missing(context, vi) [ 1211.856763] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1211.856763] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] image_cache(vi, tmp_image_ds_loc) [ 1211.856763] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1211.856763] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] vm_util.copy_virtual_disk( [ 1211.857161] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File 
"/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1211.857161] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] session._wait_for_task(vmdk_copy_task) [ 1211.857161] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1211.857161] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] return self.wait_for_task(task_ref) [ 1211.857161] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1211.857161] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] return evt.wait() [ 1211.857161] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1211.857161] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] result = hub.switch() [ 1211.857161] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1211.857161] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] return self.greenlet.switch() [ 1211.857161] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1211.857161] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] self.f(*self.args, **self.kw) [ 1211.857161] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1211.857558] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] raise exceptions.translate_fault(task_info.error) [ 1211.857558] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1211.857558] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Faults: ['InvalidArgument'] [ 1211.857558] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] [ 1211.857558] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] During handling of the above exception, another exception occurred: [ 1211.857558] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] [ 1211.857558] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Traceback (most recent call last): [ 1211.857558] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/nova/nova/compute/manager.py", line 2447, in _do_build_and_run_instance [ 1211.857558] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] self._build_and_run_instance(context, instance, image, [ 1211.857558] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File 
"/opt/stack/nova/nova/compute/manager.py", line 2739, in _build_and_run_instance [ 1211.857558] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] raise exception.RescheduledException( [ 1211.857558] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] nova.exception.RescheduledException: Build of instance e5fe92cf-e150-419f-a164-a98a9d24dd8c was re-scheduled: A specified parameter was not correct: fileType [ 1211.857558] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Faults: ['InvalidArgument'] [ 1211.857558] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] [ 1211.857992] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] During handling of the above exception, another exception occurred: [ 1211.857992] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] [ 1211.857992] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Traceback (most recent call last): [ 1211.857992] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1211.857992] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] ret = obj(*args, **kwargs) [ 1211.857992] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1211.857992] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] exception_handler_v20(status_code, error_body) [ 1211.857992] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1211.857992] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] raise client_exc(message=error_message, [ 1211.857992] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1211.857992] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Neutron server returns request_ids: ['req-c0b37134-49c1-4c9b-8cc6-21e2cf80f0b2'] [ 1211.857992] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] [ 1211.857992] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] During handling of the above exception, another exception occurred: [ 1211.858386] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] [ 1211.858386] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Traceback (most recent call last): [ 1211.858386] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/nova/nova/compute/manager.py", line 3036, in _cleanup_allocated_networks [ 1211.858386] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] self._deallocate_network(context, instance, requested_networks) [ 1211.858386] env[61649]: ERROR nova.compute.manager 
[instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1211.858386] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] self.network_api.deallocate_for_instance( [ 1211.858386] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/nova/nova/network/neutron.py", line 1805, in deallocate_for_instance [ 1211.858386] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] data = neutron.list_ports(**search_opts) [ 1211.858386] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1211.858386] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] ret = obj(*args, **kwargs) [ 1211.858386] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1211.858386] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] return self.list('ports', self.ports_path, retrieve_all, [ 1211.858386] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1211.858765] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] ret = obj(*args, **kwargs) [ 1211.858765] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1211.858765] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] for r in self._pagination(collection, path, **params): [ 1211.858765] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1211.858765] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] res = self.get(path, params=params) [ 1211.858765] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1211.858765] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] ret = obj(*args, **kwargs) [ 1211.858765] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1211.858765] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] return self.retry_request("GET", action, body=body, [ 1211.858765] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1211.858765] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] ret = obj(*args, **kwargs) [ 1211.858765] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1211.858765] env[61649]: ERROR nova.compute.manager [instance: 
e5fe92cf-e150-419f-a164-a98a9d24dd8c] return self.do_request(method, action, body=body, [ 1211.859137] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1211.859137] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] ret = obj(*args, **kwargs) [ 1211.859137] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1211.859137] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] self._handle_fault_response(status_code, replybody, resp) [ 1211.859137] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 1211.859137] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] raise exception.Unauthorized() [ 1211.859137] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] nova.exception.Unauthorized: Not authorized. [ 1211.859137] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] [ 1211.859137] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Expecting reply to msg fe1a2db90bde424488473583fb969e63 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1211.893931] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fe1a2db90bde424488473583fb969e63 [ 1211.912130] env[61649]: INFO nova.scheduler.client.report [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Deleted allocations for instance e5fe92cf-e150-419f-a164-a98a9d24dd8c [ 1211.919964] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Expecting reply to msg b2e5cb0936224292b7c749083207c5f5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1211.928316] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b2e5cb0936224292b7c749083207c5f5 [ 1211.928882] env[61649]: DEBUG oslo_concurrency.lockutils [None req-91dd62e8-b454-4b6f-a12a-356dc0e40f7c tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Lock "e5fe92cf-e150-419f-a164-a98a9d24dd8c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 623.768s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1211.929594] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-563dd667-8567-4ddb-b349-5bc02934cb87 tempest-ServerPasswordTestJSON-936204548 tempest-ServerPasswordTestJSON-936204548-project-member] Expecting reply to msg 93bda92a2dfe4840bc629c9416139c2d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1211.946818] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7887162d-7c3e-4eff-bcac-fbb705555f89 tempest-ServersAdminNegativeTestJSON-411079484
tempest-ServersAdminNegativeTestJSON-411079484-project-member] Lock "e5fe92cf-e150-419f-a164-a98a9d24dd8c" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 425.933s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1211.947076] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7887162d-7c3e-4eff-bcac-fbb705555f89 tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Acquiring lock "e5fe92cf-e150-419f-a164-a98a9d24dd8c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1211.947329] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7887162d-7c3e-4eff-bcac-fbb705555f89 tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Lock "e5fe92cf-e150-419f-a164-a98a9d24dd8c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1211.947591] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7887162d-7c3e-4eff-bcac-fbb705555f89 tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Lock "e5fe92cf-e150-419f-a164-a98a9d24dd8c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1211.950012] env[61649]: INFO nova.compute.manager [None req-7887162d-7c3e-4eff-bcac-fbb705555f89 tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Terminating instance [ 1211.952647] env[61649]: DEBUG nova.compute.manager [None req-7887162d-7c3e-4eff-bcac-fbb705555f89 tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Start destroying the instance on the hypervisor.
{{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1211.952813] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-7887162d-7c3e-4eff-bcac-fbb705555f89 tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1211.953677] env[61649]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d438284d-e788-4faf-8349-cf459047220f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.962983] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-681dde2c-880a-4101-b147-71ce743ed404 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.983022] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 93bda92a2dfe4840bc629c9416139c2d [ 1211.983702] env[61649]: DEBUG nova.compute.manager [None req-563dd667-8567-4ddb-b349-5bc02934cb87 tempest-ServerPasswordTestJSON-936204548 tempest-ServerPasswordTestJSON-936204548-project-member] [instance: 0534f500-d8d8-4aad-896c-c965778c3a6f] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1211.986391] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-563dd667-8567-4ddb-b349-5bc02934cb87 tempest-ServerPasswordTestJSON-936204548 tempest-ServerPasswordTestJSON-936204548-project-member] Expecting reply to msg 38dd570817a747379cb8c25eed1e1762 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1212.011074] env[61649]: WARNING nova.virt.vmwareapi.vmops [None req-7887162d-7c3e-4eff-bcac-fbb705555f89 tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e5fe92cf-e150-419f-a164-a98a9d24dd8c could not be found. [ 1212.011341] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-7887162d-7c3e-4eff-bcac-fbb705555f89 tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1212.011540] env[61649]: INFO nova.compute.manager [None req-7887162d-7c3e-4eff-bcac-fbb705555f89 tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Took 0.06 seconds to destroy the instance on the hypervisor. [ 1212.011827] env[61649]: DEBUG oslo.service.loopingcall [None req-7887162d-7c3e-4eff-bcac-fbb705555f89 tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1212.012006] env[61649]: DEBUG nova.compute.manager [-] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1212.012139] env[61649]: DEBUG nova.network.neutron [-] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1212.014679] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 38dd570817a747379cb8c25eed1e1762 [ 1212.015306] env[61649]: DEBUG nova.compute.manager [None req-563dd667-8567-4ddb-b349-5bc02934cb87 tempest-ServerPasswordTestJSON-936204548 tempest-ServerPasswordTestJSON-936204548-project-member] [instance: 0534f500-d8d8-4aad-896c-c965778c3a6f] Instance disappeared before build. {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1212.015727] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-563dd667-8567-4ddb-b349-5bc02934cb87 tempest-ServerPasswordTestJSON-936204548 tempest-ServerPasswordTestJSON-936204548-project-member] Expecting reply to msg 1ad70bdd40ee49bfa5ab1115a096ab29 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1212.032209] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1ad70bdd40ee49bfa5ab1115a096ab29 [ 1212.041576] env[61649]: DEBUG oslo_concurrency.lockutils [None req-563dd667-8567-4ddb-b349-5bc02934cb87 tempest-ServerPasswordTestJSON-936204548 tempest-ServerPasswordTestJSON-936204548-project-member] Lock "0534f500-d8d8-4aad-896c-c965778c3a6f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 228.367s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1212.042156] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-1284b3d6-4ce1-49ec-9c60-72e78b810975 tempest-ServerShowV257Test-221320248 tempest-ServerShowV257Test-221320248-project-member] Expecting reply to msg cb45ee2b303e440dac79d476f5df15f9 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1212.050708] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cb45ee2b303e440dac79d476f5df15f9 [ 1212.051209] env[61649]: DEBUG nova.compute.manager [None req-1284b3d6-4ce1-49ec-9c60-72e78b810975 tempest-ServerShowV257Test-221320248 tempest-ServerShowV257Test-221320248-project-member] [instance: 6d3ee887-6b6b-4199-aea6-f0de0153e5c5] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1212.052930] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-1284b3d6-4ce1-49ec-9c60-72e78b810975 tempest-ServerShowV257Test-221320248 tempest-ServerShowV257Test-221320248-project-member] Expecting reply to msg 7479fa834f1b4ba194a722121d18e37f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1212.080568] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7479fa834f1b4ba194a722121d18e37f [ 1212.081201] env[61649]: DEBUG nova.compute.manager [None req-1284b3d6-4ce1-49ec-9c60-72e78b810975 tempest-ServerShowV257Test-221320248 tempest-ServerShowV257Test-221320248-project-member] [instance: 6d3ee887-6b6b-4199-aea6-f0de0153e5c5] Instance disappeared before build.
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1212.081580] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-1284b3d6-4ce1-49ec-9c60-72e78b810975 tempest-ServerShowV257Test-221320248 tempest-ServerShowV257Test-221320248-project-member] Expecting reply to msg 419095ecb21f40569fd126d2ca972614 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1212.092456] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 419095ecb21f40569fd126d2ca972614 [ 1212.104459] env[61649]: DEBUG oslo_concurrency.lockutils [None req-1284b3d6-4ce1-49ec-9c60-72e78b810975 tempest-ServerShowV257Test-221320248 tempest-ServerShowV257Test-221320248-project-member] Lock "6d3ee887-6b6b-4199-aea6-f0de0153e5c5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 227.980s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1212.105046] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-e375d8c7-cc0d-4ff5-adeb-759e225a88eb tempest-ServerShowV247Test-277858123 tempest-ServerShowV247Test-277858123-project-member] Expecting reply to msg 6c93a35ffc664fd68d071819920499ca in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1212.114116] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6c93a35ffc664fd68d071819920499ca [ 1212.114583] env[61649]: DEBUG nova.compute.manager [None req-e375d8c7-cc0d-4ff5-adeb-759e225a88eb tempest-ServerShowV247Test-277858123 tempest-ServerShowV247Test-277858123-project-member] [instance: 82ae439e-5fe0-4bed-b550-e34929c139f8] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1212.116249] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-e375d8c7-cc0d-4ff5-adeb-759e225a88eb tempest-ServerShowV247Test-277858123 tempest-ServerShowV247Test-277858123-project-member] Expecting reply to msg 950116d40e714538ab32f95ac91331f4 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1212.151759] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 950116d40e714538ab32f95ac91331f4 [ 1212.152417] env[61649]: DEBUG nova.compute.manager [None req-e375d8c7-cc0d-4ff5-adeb-759e225a88eb tempest-ServerShowV247Test-277858123 tempest-ServerShowV247Test-277858123-project-member] [instance: 82ae439e-5fe0-4bed-b550-e34929c139f8] Instance disappeared before build.
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1212.152778] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-e375d8c7-cc0d-4ff5-adeb-759e225a88eb tempest-ServerShowV247Test-277858123 tempest-ServerShowV247Test-277858123-project-member] Expecting reply to msg ce1fae7f617e4f9e9fc666feac6725d4 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1212.157147] env[61649]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61649) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1212.157395] env[61649]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1212.158082] env[61649]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.<locals>._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1212.158082] env[61649]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1212.158082] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1212.158082] env[61649]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1212.158082] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1212.158082] env[61649]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1212.158082] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1212.158082] env[61649]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1212.158082] env[61649]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1212.158082] env[61649]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-7f76d197-514b-4591-9ae8-324cf81d4b55'] [ 1212.158082] env[61649]: ERROR oslo.service.loopingcall [ 1212.158082] env[61649]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1212.158082] env[61649]: ERROR oslo.service.loopingcall [ 1212.158082] env[61649]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1212.158082] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1212.158082] env[61649]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1212.158639] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1212.158639] env[61649]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1212.158639] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3062, in
_deallocate_network_with_retries [ 1212.158639] env[61649]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1212.158639] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1212.158639] env[61649]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1212.158639] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1805, in deallocate_for_instance [ 1212.158639] env[61649]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1212.158639] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1212.158639] env[61649]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1212.158639] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1212.158639] env[61649]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1212.158639] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1212.158639] env[61649]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1212.158639] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1212.158639] env[61649]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1212.158639] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1212.158639] env[61649]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1212.159220] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1212.159220] env[61649]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1212.159220] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1212.159220] env[61649]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1212.159220] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1212.159220] env[61649]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1212.159220] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1212.159220] env[61649]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1212.159220] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1212.159220] env[61649]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1212.159220] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1212.159220] env[61649]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1212.159220] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1212.159220] env[61649]: ERROR oslo.service.loopingcall raise 
exception.NeutronAdminCredentialConfigurationInvalid() [ 1212.159220] env[61649]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1212.159220] env[61649]: ERROR oslo.service.loopingcall [ 1212.159729] env[61649]: ERROR nova.compute.manager [None req-7887162d-7c3e-4eff-bcac-fbb705555f89 tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1212.160171] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7887162d-7c3e-4eff-bcac-fbb705555f89 tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Expecting reply to msg 72286b68374c48f48b567055f3043d60 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1212.169754] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ce1fae7f617e4f9e9fc666feac6725d4 [ 1212.183478] env[61649]: DEBUG oslo_concurrency.lockutils [None req-e375d8c7-cc0d-4ff5-adeb-759e225a88eb tempest-ServerShowV247Test-277858123 tempest-ServerShowV247Test-277858123-project-member] Lock "82ae439e-5fe0-4bed-b550-e34929c139f8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 206.429s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1212.184075] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9dce6ced-8bdf-4805-8d98-3c93d9ce4e85 tempest-ServersTestBootFromVolume-1834891323 tempest-ServersTestBootFromVolume-1834891323-project-member] Expecting reply to msg c50222f246944cba9d009be62b99c7e5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1212.195421] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c50222f246944cba9d009be62b99c7e5 [ 1212.195421] env[61649]: DEBUG nova.compute.manager [None req-9dce6ced-8bdf-4805-8d98-3c93d9ce4e85 tempest-ServersTestBootFromVolume-1834891323 tempest-ServersTestBootFromVolume-1834891323-project-member] [instance: 40b216ba-3afd-4cfe-b98b-c5de03501317] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1212.197108] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9dce6ced-8bdf-4805-8d98-3c93d9ce4e85 tempest-ServersTestBootFromVolume-1834891323 tempest-ServersTestBootFromVolume-1834891323-project-member] Expecting reply to msg 293ca025062740b9b8d6faa0f6399d4a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1212.198292] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 72286b68374c48f48b567055f3043d60 [ 1212.200129] env[61649]: ERROR nova.compute.manager [None req-7887162d-7c3e-4eff-bcac-fbb705555f89 tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception.
[ 1212.200129] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Traceback (most recent call last): [ 1212.200129] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1212.200129] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] ret = obj(*args, **kwargs) [ 1212.200129] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1212.200129] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] exception_handler_v20(status_code, error_body) [ 1212.200129] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1212.200129] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] raise client_exc(message=error_message, [ 1212.200129] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1212.200129] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Neutron server returns request_ids: ['req-7f76d197-514b-4591-9ae8-324cf81d4b55'] [ 1212.200477] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] [ 1212.200477] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] During handling of the above exception, another exception occurred: [ 1212.200477] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] [ 1212.200477] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Traceback (most recent call last): [ 1212.200477] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 1212.200477] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] self._delete_instance(context, instance, bdms) [ 1212.200477] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1212.200477] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] self._shutdown_instance(context, instance, bdms) [ 1212.200477] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1212.200477] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] self._try_deallocate_network(context, instance, requested_networks) [ 1212.200477] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1212.200477] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] with excutils.save_and_reraise_exception(): [ 1212.200477] env[61649]: ERROR 
nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1212.200477] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] self.force_reraise() [ 1212.200989] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1212.200989] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] raise self.value [ 1212.200989] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1212.200989] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] _deallocate_network_with_retries() [ 1212.200989] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1212.200989] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] return evt.wait() [ 1212.200989] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1212.200989] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] result = hub.switch() [ 1212.200989] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1212.200989] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] return self.greenlet.switch() [ 1212.200989] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1212.200989] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] result = func(*self.args, **self.kw) [ 1212.201348] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1212.201348] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] result = f(*args, **kwargs) [ 1212.201348] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1212.201348] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] self._deallocate_network( [ 1212.201348] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1212.201348] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] self.network_api.deallocate_for_instance( [ 1212.201348] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/nova/nova/network/neutron.py", line 1805, in deallocate_for_instance [ 1212.201348] env[61649]: ERROR nova.compute.manager [instance: 
e5fe92cf-e150-419f-a164-a98a9d24dd8c] data = neutron.list_ports(**search_opts) [ 1212.201348] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1212.201348] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] ret = obj(*args, **kwargs) [ 1212.201348] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1212.201348] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] return self.list('ports', self.ports_path, retrieve_all, [ 1212.201348] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1212.201762] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] ret = obj(*args, **kwargs) [ 1212.201762] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1212.201762] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] for r in self._pagination(collection, path, **params): [ 1212.201762] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1212.201762] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] res = self.get(path, params=params) [ 1212.201762] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1212.201762] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] ret = obj(*args, **kwargs) [ 1212.201762] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1212.201762] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] return self.retry_request("GET", action, body=body, [ 1212.201762] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1212.201762] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] ret = obj(*args, **kwargs) [ 1212.201762] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1212.201762] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] return self.do_request(method, action, body=body, [ 1212.202166] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1212.202166] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] ret = obj(*args, **kwargs) [ 1212.202166] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1212.202166] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] self._handle_fault_response(status_code, replybody, resp) [ 1212.202166] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1212.202166] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1212.202166] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1212.202166] env[61649]: ERROR nova.compute.manager [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] [ 1212.202166] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7887162d-7c3e-4eff-bcac-fbb705555f89 tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Expecting reply to msg fe6857240e2246a7a989b07d0970c4ae in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1212.224456] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 293ca025062740b9b8d6faa0f6399d4a [ 1212.225240] env[61649]: DEBUG nova.compute.manager [None req-9dce6ced-8bdf-4805-8d98-3c93d9ce4e85 tempest-ServersTestBootFromVolume-1834891323 tempest-ServersTestBootFromVolume-1834891323-project-member] [instance: 40b216ba-3afd-4cfe-b98b-c5de03501317] Instance disappeared before build. {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1212.225376] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9dce6ced-8bdf-4805-8d98-3c93d9ce4e85 tempest-ServersTestBootFromVolume-1834891323 tempest-ServersTestBootFromVolume-1834891323-project-member] Expecting reply to msg 3030d85ab50f4c7e9155b3721e04361e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1212.230179] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fe6857240e2246a7a989b07d0970c4ae [ 1212.231426] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7887162d-7c3e-4eff-bcac-fbb705555f89 tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Lock "e5fe92cf-e150-419f-a164-a98a9d24dd8c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.285s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1212.231750] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7887162d-7c3e-4eff-bcac-fbb705555f89 tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Expecting reply to msg c2db0f2c00ae4f42b778b279bfce21d4 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1212.232528] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "e5fe92cf-e150-419f-a164-a98a9d24dd8c" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 84.205s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1212.232709] env[61649]: INFO nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 
[ 1212.232875] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "e5fe92cf-e150-419f-a164-a98a9d24dd8c" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1212.234641] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3030d85ab50f4c7e9155b3721e04361e
[ 1212.238485] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c2db0f2c00ae4f42b778b279bfce21d4
[ 1212.239852] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7887162d-7c3e-4eff-bcac-fbb705555f89 tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Expecting reply to msg 5ece1438f6934cd08b5fbfe5870298dd in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1212.245883] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9dce6ced-8bdf-4805-8d98-3c93d9ce4e85 tempest-ServersTestBootFromVolume-1834891323 tempest-ServersTestBootFromVolume-1834891323-project-member] Lock "40b216ba-3afd-4cfe-b98b-c5de03501317" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 206.479s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1212.246393] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-34b27cec-087a-43cd-a10e-57a88306aba8 tempest-ServerShowV247Test-277858123 tempest-ServerShowV247Test-277858123-project-member] Expecting reply to msg 89dde81357d7419b982de3fd2dd46f5a in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1212.255666] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ece1438f6934cd08b5fbfe5870298dd
[ 1212.256203] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 89dde81357d7419b982de3fd2dd46f5a
[ 1212.258096] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7887162d-7c3e-4eff-bcac-fbb705555f89 tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Expecting reply to msg 4481cbc4dbb845888205acc84e997568 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1212.259069] env[61649]: DEBUG nova.compute.manager [None req-34b27cec-087a-43cd-a10e-57a88306aba8 tempest-ServerShowV247Test-277858123 tempest-ServerShowV247Test-277858123-project-member] [instance: a12bff02-f7da-43a1-b614-beb3d6908e0b] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 1212.260667] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-34b27cec-087a-43cd-a10e-57a88306aba8 tempest-ServerShowV247Test-277858123 tempest-ServerShowV247Test-277858123-project-member] Expecting reply to msg c8d6ce2de8224695b2209d009e728e1b in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1212.283842] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c8d6ce2de8224695b2209d009e728e1b
[ 1212.284567] env[61649]: DEBUG nova.compute.manager [None req-34b27cec-087a-43cd-a10e-57a88306aba8 tempest-ServerShowV247Test-277858123 tempest-ServerShowV247Test-277858123-project-member] [instance: a12bff02-f7da-43a1-b614-beb3d6908e0b] Instance disappeared before build. {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}}
[ 1212.284957] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-34b27cec-087a-43cd-a10e-57a88306aba8 tempest-ServerShowV247Test-277858123 tempest-ServerShowV247Test-277858123-project-member] Expecting reply to msg 24120d57e12343419fb6dca2a5917803 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1212.292032] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4481cbc4dbb845888205acc84e997568
[ 1212.293691] env[61649]: INFO nova.compute.manager [None req-7887162d-7c3e-4eff-bcac-fbb705555f89 tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] [instance: e5fe92cf-e150-419f-a164-a98a9d24dd8c] Successfully reverted task state from None on failure for instance.
[ 1212.298144] env[61649]: ERROR oslo_messaging.rpc.server [None req-7887162d-7c3e-4eff-bcac-fbb705555f89 tempest-ServersAdminNegativeTestJSON-411079484 tempest-ServersAdminNegativeTestJSON-411079484-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception.
[ 1212.298144] env[61649]: ERROR oslo_messaging.rpc.server Traceback (most recent call last):
[ 1212.298144] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1212.298144] env[61649]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs)
[ 1212.298144] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response
[ 1212.298144] env[61649]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body)
[ 1212.298144] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20
[ 1212.298144] env[61649]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message,
[ 1212.298144] env[61649]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}}
[ 1212.298144] env[61649]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-7f76d197-514b-4591-9ae8-324cf81d4b55']
[ 1212.298144] env[61649]: ERROR oslo_messaging.rpc.server
[ 1212.298144] env[61649]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred:
[ 1212.298144] env[61649]: ERROR oslo_messaging.rpc.server
[ 1212.298144] env[61649]: ERROR oslo_messaging.rpc.server Traceback (most recent call last):
[ 1212.298144] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming
[ 1212.298700] env[61649]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message)
[ 1212.298700] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch
[ 1212.298700] env[61649]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args)
[ 1212.298700] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch
[ 1212.298700] env[61649]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args)
[ 1212.298700] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped
[ 1212.298700] env[61649]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception():
[ 1212.298700] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1212.298700] env[61649]: ERROR oslo_messaging.rpc.server self.force_reraise()
[ 1212.298700] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1212.298700] env[61649]: ERROR oslo_messaging.rpc.server raise self.value
[ 1212.298700] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped
[ 1212.298700] env[61649]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw)
[ 1212.298700] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function
[ 1212.298700] env[61649]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception():
[ 1212.298700] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1212.298700] env[61649]: ERROR oslo_messaging.rpc.server self.force_reraise()
[ 1212.298700] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1212.299284] env[61649]: ERROR oslo_messaging.rpc.server raise self.value
[ 1212.299284] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function
[ 1212.299284] env[61649]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs)
[ 1212.299284] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function
[ 1212.299284] env[61649]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs)
[ 1212.299284] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function
[ 1212.299284] env[61649]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception():
[ 1212.299284] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1212.299284] env[61649]: ERROR oslo_messaging.rpc.server self.force_reraise()
[ 1212.299284] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1212.299284] env[61649]: ERROR oslo_messaging.rpc.server raise self.value
[ 1212.299284] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function
[ 1212.299284] env[61649]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs)
[ 1212.299284] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3344, in terminate_instance
[ 1212.299284] env[61649]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms)
[ 1212.299284] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner
[ 1212.299284] env[61649]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs)
[ 1212.299284] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3339, in do_terminate_instance
[ 1212.299902] env[61649]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception():
[ 1212.299902] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1212.299902] env[61649]: ERROR oslo_messaging.rpc.server self.force_reraise()
[ 1212.299902] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1212.299902] env[61649]: ERROR oslo_messaging.rpc.server raise self.value
[ 1212.299902] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance
[ 1212.299902] env[61649]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms)
[ 1212.299902] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance
[ 1212.299902] env[61649]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms)
[ 1212.299902] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance
[ 1212.299902] env[61649]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks)
[ 1212.299902] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network
[ 1212.299902] env[61649]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception():
[ 1212.299902] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1212.299902] env[61649]: ERROR oslo_messaging.rpc.server self.force_reraise()
[ 1212.299902] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1212.299902] env[61649]: ERROR oslo_messaging.rpc.server raise self.value
[ 1212.299902] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network
[ 1212.300495] env[61649]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries()
[ 1212.300495] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func
[ 1212.300495] env[61649]: ERROR oslo_messaging.rpc.server return evt.wait()
[ 1212.300495] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1212.300495] env[61649]: ERROR oslo_messaging.rpc.server result = hub.switch()
[ 1212.300495] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1212.300495] env[61649]: ERROR oslo_messaging.rpc.server return self.greenlet.switch()
[ 1212.300495] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop
[ 1212.300495] env[61649]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw)
[ 1212.300495] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func
[ 1212.300495] env[61649]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs)
[ 1212.300495] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries
[ 1212.300495] env[61649]: ERROR oslo_messaging.rpc.server self._deallocate_network(
[ 1212.300495] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network
[ 1212.300495] env[61649]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance(
[ 1212.300495] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1805, in deallocate_for_instance
[ 1212.300495] env[61649]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts)
[ 1212.300495] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1212.301017] env[61649]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs)
[ 1212.301017] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports
[ 1212.301017] env[61649]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all,
[ 1212.301017] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1212.301017] env[61649]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs)
[ 1212.301017] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list
[ 1212.301017] env[61649]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params):
[ 1212.301017] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination
[ 1212.301017] env[61649]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params)
[ 1212.301017] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1212.301017] env[61649]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs)
[ 1212.301017] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get
[ 1212.301017] env[61649]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body,
[ 1212.301017] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1212.301017] env[61649]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs)
[ 1212.301017] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request
[ 1212.301017] env[61649]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body,
[ 1212.301017] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1212.301521] env[61649]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs)
[ 1212.301521] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request
[ 1212.301521] env[61649]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp)
[ 1212.301521] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper
[ 1212.301521] env[61649]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid()
[ 1212.301521] env[61649]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception.
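Note: the repeated wrapper frames at nova/network/neutron.py:196 and the final raise at line 212 in the trace above are Nova's client-exception shim: calls on the neutron client are routed through a decorator that converts the HTTP-level neutronclient.common.exceptions.Unauthorized (the 401 in the first traceback) into the Nova-level NeutronAdminCredentialConfigurationInvalid. A minimal sketch of that translation pattern follows; it is illustrative only, not the literal Nova source, and the decorator name and the locally defined exception class are stand-ins.

import functools

from neutronclient.common import exceptions as neutron_client_exc


class NeutronAdminCredentialConfigurationInvalid(Exception):
    # Stand-in for nova.exception.NeutronAdminCredentialConfigurationInvalid.
    pass


def translate_neutron_auth_errors(obj):
    # Simplified version of the "wrapper" frame seen in the traceback: call
    # through to the underlying neutronclient method and convert a 401 into
    # a Nova configuration error.
    @functools.wraps(obj)
    def wrapper(*args, **kwargs):
        try:
            ret = obj(*args, **kwargs)  # the recurring "ret = obj(...)" frame
        except neutron_client_exc.Unauthorized:
            # A service-credentialed client should never receive a 401, so it
            # is treated as misconfigured admin credentials, not a user error.
            raise NeutronAdminCredentialConfigurationInvalid()
        return ret
    return wrapper

In the run above every list_ports call on the deallocate path hits the same 401, so _try_deallocate_network gives up and terminate_instance re-raises after the "Successfully reverted task state" record logged just before this trace.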
[ 1212.301521] env[61649]: ERROR oslo_messaging.rpc.server [ 1212.307654] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 24120d57e12343419fb6dca2a5917803 [ 1212.320995] env[61649]: DEBUG oslo_concurrency.lockutils [None req-34b27cec-087a-43cd-a10e-57a88306aba8 tempest-ServerShowV247Test-277858123 tempest-ServerShowV247Test-277858123-project-member] Lock "a12bff02-f7da-43a1-b614-beb3d6908e0b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 205.946s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1212.320995] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-22910028-f533-4462-af14-ae913b898e19 tempest-ServersTestMultiNic-1885917485 tempest-ServersTestMultiNic-1885917485-project-member] Expecting reply to msg 31ff259958ce4d9aa084def3bedc30af in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1212.333299] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 31ff259958ce4d9aa084def3bedc30af [ 1212.333299] env[61649]: DEBUG nova.compute.manager [None req-22910028-f533-4462-af14-ae913b898e19 tempest-ServersTestMultiNic-1885917485 tempest-ServersTestMultiNic-1885917485-project-member] [instance: 2b4cc40e-a6fc-48df-baaf-f74352c24408] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1212.333299] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-22910028-f533-4462-af14-ae913b898e19 tempest-ServersTestMultiNic-1885917485 tempest-ServersTestMultiNic-1885917485-project-member] Expecting reply to msg 425ffc03acd340298c7b6b262d158a83 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1212.361326] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 425ffc03acd340298c7b6b262d158a83 [ 1212.361326] env[61649]: DEBUG nova.compute.manager [None req-22910028-f533-4462-af14-ae913b898e19 tempest-ServersTestMultiNic-1885917485 tempest-ServersTestMultiNic-1885917485-project-member] [instance: 2b4cc40e-a6fc-48df-baaf-f74352c24408] Instance disappeared before build. 
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1212.361326] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-22910028-f533-4462-af14-ae913b898e19 tempest-ServersTestMultiNic-1885917485 tempest-ServersTestMultiNic-1885917485-project-member] Expecting reply to msg c41de02987d840d5bc7176be3adfc439 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1212.376455] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c41de02987d840d5bc7176be3adfc439 [ 1212.387713] env[61649]: DEBUG oslo_concurrency.lockutils [None req-22910028-f533-4462-af14-ae913b898e19 tempest-ServersTestMultiNic-1885917485 tempest-ServersTestMultiNic-1885917485-project-member] Lock "2b4cc40e-a6fc-48df-baaf-f74352c24408" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 203.572s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1212.387713] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 674678e064eb44259028cdbc5ffdd961 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1212.394978] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 674678e064eb44259028cdbc5ffdd961 [ 1212.395447] env[61649]: DEBUG nova.compute.manager [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1212.397170] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 30ff5996aabd4657afafd96b59daa854 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1212.440428] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 30ff5996aabd4657afafd96b59daa854 [ 1212.457516] env[61649]: DEBUG oslo_concurrency.lockutils [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1212.457783] env[61649]: DEBUG oslo_concurrency.lockutils [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1212.459311] env[61649]: INFO nova.compute.claims [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1212.460853] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None 
req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 49f898023c754cebb72d118cdf63e838 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1212.502563] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 49f898023c754cebb72d118cdf63e838 [ 1212.503957] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 41d4b13b258d46e48db3319ef23efd40 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1212.512345] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 41d4b13b258d46e48db3319ef23efd40 [ 1212.764723] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d608ca9-9ba2-4f2c-b4e2-6d6679f09751 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.773212] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac3d9d51-a37e-403b-aaa8-43b26811b33f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.806211] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9716f9d6-637e-40bf-bcce-621bd327ceae {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.814014] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2973cbd0-6762-43ff-82f3-078c870f9aed {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.827345] env[61649]: DEBUG nova.compute.provider_tree [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1212.827866] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 95aeb9d4844a461fb30d4de21ec813bf in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1212.837059] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 95aeb9d4844a461fb30d4de21ec813bf [ 1212.838068] env[61649]: DEBUG nova.scheduler.client.report [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1212.840484] env[61649]: INFO 
oslo_messaging._drivers.amqpdriver [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 4e8d9fb0d51748d1a8f7ebfd2e39a4ac in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1212.852695] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4e8d9fb0d51748d1a8f7ebfd2e39a4ac [ 1212.853600] env[61649]: DEBUG oslo_concurrency.lockutils [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.396s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1212.854075] env[61649]: DEBUG nova.compute.manager [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Start building networks asynchronously for instance. {{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1212.855742] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 433af72bfd0b43e88d44258685edfa78 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1212.888503] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 433af72bfd0b43e88d44258685edfa78 [ 1212.890366] env[61649]: DEBUG nova.compute.utils [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Using /dev/sd instead of None {{(pid=61649) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1212.890970] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg b03d69d540a94a78b17240fee13e1688 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1212.892046] env[61649]: DEBUG nova.compute.manager [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Allocating IP information in the background. {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1212.892209] env[61649]: DEBUG nova.network.neutron [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] allocate_for_instance() {{(pid=61649) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1212.900846] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b03d69d540a94a78b17240fee13e1688 [ 1212.901400] env[61649]: DEBUG nova.compute.manager [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Start building block device mappings for instance. 
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1212.903025] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 6ffbe9e0856c49f183367023d2a1139d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1212.935565] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6ffbe9e0856c49f183367023d2a1139d [ 1212.938272] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg ae2b453f2700489f8a9d7c5ba7ac16f3 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1212.963087] env[61649]: DEBUG nova.policy [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b8c6e9a3ba6a48669b1772886e22e023', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4a732894bf424b5e9e3e972af47a7314', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61649) authorize /opt/stack/nova/nova/policy.py:203}} [ 1212.975383] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ae2b453f2700489f8a9d7c5ba7ac16f3 [ 1212.976612] env[61649]: DEBUG nova.compute.manager [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Start spawning the instance on the hypervisor. 
{{(pid=61649) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1213.002905] env[61649]: DEBUG nova.virt.hardware [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-04-02T10:03:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-04-02T10:03:42Z,direct_url=,disk_format='vmdk',id=d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d9c1bd4c77004c3cb8e42232cad1896c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-04-02T10:03:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1213.002905] env[61649]: DEBUG nova.virt.hardware [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Flavor limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1213.002905] env[61649]: DEBUG nova.virt.hardware [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Image limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1213.003127] env[61649]: DEBUG nova.virt.hardware [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Flavor pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1213.003127] env[61649]: DEBUG nova.virt.hardware [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Image pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1213.003127] env[61649]: DEBUG nova.virt.hardware [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1213.003127] env[61649]: DEBUG nova.virt.hardware [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1213.003127] env[61649]: DEBUG nova.virt.hardware [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1213.003302] env[61649]: DEBUG 
nova.virt.hardware [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Got 1 possible topologies {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1213.003302] env[61649]: DEBUG nova.virt.hardware [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1213.003302] env[61649]: DEBUG nova.virt.hardware [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1213.004406] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8d74f99-fc2f-437f-9322-00bbac2ff600 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.013246] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1df1a7fa-ea2d-4c14-becd-34cc32b70fa7 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.501221] env[61649]: DEBUG nova.network.neutron [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Successfully created port: 28c3286e-e908-4158-bbaf-896144bf9507 {{(pid=61649) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1214.544896] env[61649]: DEBUG nova.network.neutron [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Successfully updated port: 28c3286e-e908-4158-bbaf-896144bf9507 {{(pid=61649) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1214.545651] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg cf07e8c9da8d45e5bd102164ed48fdcd in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1214.553403] env[61649]: DEBUG nova.compute.manager [req-f9ea5bf7-348f-4b81-a8ea-b2990ea5f7ce req-866503e2-aae5-41ba-9f59-76b0ab0e1cc0 service nova] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Received event network-vif-plugged-28c3286e-e908-4158-bbaf-896144bf9507 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1214.553676] env[61649]: DEBUG oslo_concurrency.lockutils [req-f9ea5bf7-348f-4b81-a8ea-b2990ea5f7ce req-866503e2-aae5-41ba-9f59-76b0ab0e1cc0 service nova] Acquiring lock "0b0050ff-2714-4068-9956-089c6aa3eff1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1214.553879] env[61649]: DEBUG oslo_concurrency.lockutils [req-f9ea5bf7-348f-4b81-a8ea-b2990ea5f7ce req-866503e2-aae5-41ba-9f59-76b0ab0e1cc0 
service nova] Lock "0b0050ff-2714-4068-9956-089c6aa3eff1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1214.553980] env[61649]: DEBUG oslo_concurrency.lockutils [req-f9ea5bf7-348f-4b81-a8ea-b2990ea5f7ce req-866503e2-aae5-41ba-9f59-76b0ab0e1cc0 service nova] Lock "0b0050ff-2714-4068-9956-089c6aa3eff1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1214.554145] env[61649]: DEBUG nova.compute.manager [req-f9ea5bf7-348f-4b81-a8ea-b2990ea5f7ce req-866503e2-aae5-41ba-9f59-76b0ab0e1cc0 service nova] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] No waiting events found dispatching network-vif-plugged-28c3286e-e908-4158-bbaf-896144bf9507 {{(pid=61649) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1214.554309] env[61649]: WARNING nova.compute.manager [req-f9ea5bf7-348f-4b81-a8ea-b2990ea5f7ce req-866503e2-aae5-41ba-9f59-76b0ab0e1cc0 service nova] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Received unexpected event network-vif-plugged-28c3286e-e908-4158-bbaf-896144bf9507 for instance with vm_state building and task_state spawning. [ 1214.555747] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cf07e8c9da8d45e5bd102164ed48fdcd [ 1214.555981] env[61649]: DEBUG oslo_concurrency.lockutils [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Acquiring lock "refresh_cache-0b0050ff-2714-4068-9956-089c6aa3eff1" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1214.556130] env[61649]: DEBUG oslo_concurrency.lockutils [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Acquired lock "refresh_cache-0b0050ff-2714-4068-9956-089c6aa3eff1" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1214.556274] env[61649]: DEBUG nova.network.neutron [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Building network info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1214.556691] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 275860e3cda442a6a0e638f0c310e902 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1214.566928] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 275860e3cda442a6a0e638f0c310e902 [ 1214.596467] env[61649]: DEBUG nova.network.neutron [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Instance cache missing network info. 
{{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1214.826246] env[61649]: DEBUG nova.network.neutron [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Updating instance_info_cache with network_info: [{"id": "28c3286e-e908-4158-bbaf-896144bf9507", "address": "fa:16:3e:4b:9e:4a", "network": {"id": "7ccb9efc-b204-4b68-b0ee-59dd352de539", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-398085553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a732894bf424b5e9e3e972af47a7314", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db068f71-08cc-42d4-8ab6-17134c1585e5", "external-id": "nsx-vlan-transportzone-721", "segmentation_id": 721, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28c3286e-e9", "ovs_interfaceid": "28c3286e-e908-4158-bbaf-896144bf9507", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1214.826849] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 2a4798ffb2bc4495bfbf8342c58f1f70 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1214.840610] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2a4798ffb2bc4495bfbf8342c58f1f70 [ 1214.841226] env[61649]: DEBUG oslo_concurrency.lockutils [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Releasing lock "refresh_cache-0b0050ff-2714-4068-9956-089c6aa3eff1" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1214.841513] env[61649]: DEBUG nova.compute.manager [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Instance network_info: |[{"id": "28c3286e-e908-4158-bbaf-896144bf9507", "address": "fa:16:3e:4b:9e:4a", "network": {"id": "7ccb9efc-b204-4b68-b0ee-59dd352de539", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-398085553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a732894bf424b5e9e3e972af47a7314", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "db068f71-08cc-42d4-8ab6-17134c1585e5", "external-id": "nsx-vlan-transportzone-721", "segmentation_id": 721, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28c3286e-e9", "ovs_interfaceid": "28c3286e-e908-4158-bbaf-896144bf9507", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1214.841931] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4b:9e:4a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'db068f71-08cc-42d4-8ab6-17134c1585e5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '28c3286e-e908-4158-bbaf-896144bf9507', 'vif_model': 'vmxnet3'}] {{(pid=61649) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1214.849827] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Creating folder: Project (4a732894bf424b5e9e3e972af47a7314). Parent ref: group-v51588. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1214.852557] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-12b4147d-0cc1-4885-be9b-2d22137e3525 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.862314] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Created folder: Project (4a732894bf424b5e9e3e972af47a7314) in parent group-v51588. [ 1214.862528] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Creating folder: Instances. Parent ref: group-v51661. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1214.862780] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e7165561-5e18-4672-bde0-3055a1859d70 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.871439] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Created folder: Instances in parent group-v51661. [ 1214.871711] env[61649]: DEBUG oslo.service.loopingcall [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1214.872145] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Creating VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1214.872365] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e752428d-3e9a-4cad-b19b-9e425f68be6c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.898460] env[61649]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1214.898460] env[61649]: value = "task-158209" [ 1214.898460] env[61649]: _type = "Task" [ 1214.898460] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.903074] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158209, 'name': CreateVM_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.404386] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158209, 'name': CreateVM_Task, 'duration_secs': 0.294222} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.404875] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Created VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1215.412177] env[61649]: DEBUG oslo_concurrency.lockutils [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1215.412555] env[61649]: DEBUG oslo_concurrency.lockutils [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1215.412876] env[61649]: DEBUG oslo_concurrency.lockutils [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1215.413287] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6730a46-f1b4-43e7-8360-77d7393725de {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.418239] env[61649]: DEBUG oslo_vmware.api [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Waiting for the task: (returnval){ [ 1215.418239] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]524ae2f5-1c44-53c6-b222-d5d7ae7e25b1" [ 1215.418239] env[61649]: _type = "Task" [ 1215.418239] env[61649]: } to complete. 
{{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.427335] env[61649]: DEBUG oslo_vmware.api [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]524ae2f5-1c44-53c6-b222-d5d7ae7e25b1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.928768] env[61649]: DEBUG oslo_concurrency.lockutils [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1215.929096] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Processing image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1215.929233] env[61649]: DEBUG oslo_concurrency.lockutils [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1216.784139] env[61649]: DEBUG nova.compute.manager [req-89371ad7-596e-4be1-b88c-4921c9f1e42c req-5f88e3d0-2482-4591-b1d6-406de4cc7692 service nova] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Received event network-changed-28c3286e-e908-4158-bbaf-896144bf9507 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1216.784139] env[61649]: DEBUG nova.compute.manager [req-89371ad7-596e-4be1-b88c-4921c9f1e42c req-5f88e3d0-2482-4591-b1d6-406de4cc7692 service nova] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Refreshing instance network info cache due to event network-changed-28c3286e-e908-4158-bbaf-896144bf9507. 
{{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 1216.784139] env[61649]: DEBUG oslo_concurrency.lockutils [req-89371ad7-596e-4be1-b88c-4921c9f1e42c req-5f88e3d0-2482-4591-b1d6-406de4cc7692 service nova] Acquiring lock "refresh_cache-0b0050ff-2714-4068-9956-089c6aa3eff1" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1216.784139] env[61649]: DEBUG oslo_concurrency.lockutils [req-89371ad7-596e-4be1-b88c-4921c9f1e42c req-5f88e3d0-2482-4591-b1d6-406de4cc7692 service nova] Acquired lock "refresh_cache-0b0050ff-2714-4068-9956-089c6aa3eff1" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1216.784139] env[61649]: DEBUG nova.network.neutron [req-89371ad7-596e-4be1-b88c-4921c9f1e42c req-5f88e3d0-2482-4591-b1d6-406de4cc7692 service nova] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Refreshing network info cache for port 28c3286e-e908-4158-bbaf-896144bf9507 {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 1216.785305] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-89371ad7-596e-4be1-b88c-4921c9f1e42c req-5f88e3d0-2482-4591-b1d6-406de4cc7692 service nova] Expecting reply to msg eb38a48cec6344ef94e50503dc16f6dc in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1216.792364] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eb38a48cec6344ef94e50503dc16f6dc [ 1217.277989] env[61649]: DEBUG nova.network.neutron [req-89371ad7-596e-4be1-b88c-4921c9f1e42c req-5f88e3d0-2482-4591-b1d6-406de4cc7692 service nova] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Updated VIF entry in instance network info cache for port 28c3286e-e908-4158-bbaf-896144bf9507. 
{{(pid=61649) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 1217.278414] env[61649]: DEBUG nova.network.neutron [req-89371ad7-596e-4be1-b88c-4921c9f1e42c req-5f88e3d0-2482-4591-b1d6-406de4cc7692 service nova] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Updating instance_info_cache with network_info: [{"id": "28c3286e-e908-4158-bbaf-896144bf9507", "address": "fa:16:3e:4b:9e:4a", "network": {"id": "7ccb9efc-b204-4b68-b0ee-59dd352de539", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-398085553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a732894bf424b5e9e3e972af47a7314", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db068f71-08cc-42d4-8ab6-17134c1585e5", "external-id": "nsx-vlan-transportzone-721", "segmentation_id": 721, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28c3286e-e9", "ovs_interfaceid": "28c3286e-e908-4158-bbaf-896144bf9507", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1217.278935] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-89371ad7-596e-4be1-b88c-4921c9f1e42c req-5f88e3d0-2482-4591-b1d6-406de4cc7692 service nova] Expecting reply to msg c5625074140d4227b588293dfc72352a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1217.288352] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c5625074140d4227b588293dfc72352a [ 1217.288963] env[61649]: DEBUG oslo_concurrency.lockutils [req-89371ad7-596e-4be1-b88c-4921c9f1e42c req-5f88e3d0-2482-4591-b1d6-406de4cc7692 service nova] Releasing lock "refresh_cache-0b0050ff-2714-4068-9956-089c6aa3eff1" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1226.146774] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-262a3c93-167f-405d-b13f-7c829fb2bd03 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 98fa13c0ea61488ca0ee801e3f088055 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1226.157168] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 98fa13c0ea61488ca0ee801e3f088055 [ 1226.158109] env[61649]: DEBUG oslo_concurrency.lockutils [None req-262a3c93-167f-405d-b13f-7c829fb2bd03 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Acquiring lock "0b0050ff-2714-4068-9956-089c6aa3eff1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1229.006194] env[61649]: DEBUG oslo_concurrency.lockutils [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Acquiring lock "b6243867-9546-4663-9d48-5c040537490b" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1229.006504] env[61649]: DEBUG oslo_concurrency.lockutils [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Lock "b6243867-9546-4663-9d48-5c040537490b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1236.812171] env[61649]: DEBUG oslo_concurrency.lockutils [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Acquiring lock "4b87e74a-2408-466f-b1c2-68330c31fb9d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1236.812560] env[61649]: DEBUG oslo_concurrency.lockutils [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Lock "4b87e74a-2408-466f-b1c2-68330c31fb9d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1249.552985] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1250.980485] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg f4df857aaa5c4382ab7e11c6a137b824 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1250.989456] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f4df857aaa5c4382ab7e11c6a137b824 [ 1251.924474] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1251.928119] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1251.928314] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1251.928463] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61649) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 1253.929580] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1253.929835] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Starting heal instance info cache {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 1253.929865] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Rebuilding the list of instances to heal {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 1253.930424] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 2400993aff0b4c7f8fee992c4f50c417 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1253.947575] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2400993aff0b4c7f8fee992c4f50c417 [ 1253.949772] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1253.949921] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1253.950052] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1253.950184] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1253.950304] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1253.950448] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1253.950583] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Skipping network cache update for instance because it is Building. 
{{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1253.950702] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1253.950818] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1253.950935] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1253.951064] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Didn't find any instances for network info cache update. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 1253.951522] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1253.951703] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1254.929089] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1255.924135] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1255.924801] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 5a397343f63c49e2a264b12983a1cf0e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1255.942340] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5a397343f63c49e2a264b12983a1cf0e [ 1255.945780] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1255.946065] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg ec27fad490774140b232441ffcba3f60 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1255.954683] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ec27fad490774140b232441ffcba3f60 [ 1255.955507] env[61649]: DEBUG 
oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1255.955714] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1255.955876] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1255.956039] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61649) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1255.957067] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3264a95-49ee-4f60-832a-67e9971bae20 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.965687] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a43f3675-3655-4bcc-b01f-823b2d9cf1b8 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.978893] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd2f3467-779f-42e1-a97a-fedf8f519357 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.985041] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d9409a6-e14e-4acd-8da8-1d0096e8649a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.014039] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181790MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61649) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1256.014191] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1256.014385] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1256.015157] env[61649]: INFO oslo_messaging._drivers.amqpdriver 
[None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg e77343e4214d4710ba1630efdbc2440b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1256.049755] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e77343e4214d4710ba1630efdbc2440b [ 1256.053839] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg e78b8a992a414f80a687c633843efdf4 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1256.062151] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e78b8a992a414f80a687c633843efdf4 [ 1256.080023] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 95426048-d403-4dad-9ad7-b76de655a319 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1256.080023] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance a0db1e96-4ca4-4fed-b86b-d8457f3570a9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1256.080023] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 29f84900-0805-4ab2-af4d-bd7be2ac94d3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1256.080023] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance bf2399eb-b2df-43b3-bddd-48692825c40a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1256.080196] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 6ab197e9-3e38-4b37-b625-c30b6977261a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1256.080196] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance c9fe1bfe-e813-43e9-9668-b813416ee27b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1256.080196] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 9cdd96c2-2837-4cb3-855c-ecad727dd5d4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1256.080196] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 5dc4bde6-db61-47c2-a2b8-d2a5515b1525 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1256.080313] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 5730229a-fd0c-4df1-9059-cd6ed39e954c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1256.080313] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 0b0050ff-2714-4068-9956-089c6aa3eff1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1256.080378] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 898b00986ff4438a9223433f72f1d2da in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1256.090995] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 898b00986ff4438a9223433f72f1d2da [ 1256.091648] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 0fb0aaae-b6d2-418d-81a9-74671f4b97c6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1256.092111] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 4b291b23bd9743abb60339164a9ab121 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1256.101123] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4b291b23bd9743abb60339164a9ab121 [ 1256.101769] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 5f67180f-6b27-4487-8858-5f57fcffd041 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1256.102219] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg aac48c70ff8447fe8bdbf0551a25c1af in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1256.111159] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aac48c70ff8447fe8bdbf0551a25c1af [ 1256.111793] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance aa39503a-2342-421e-928f-35ec7c8e47fb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1256.112236] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 22347c32a6cc45718c9dee7023f25096 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1256.120964] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 22347c32a6cc45718c9dee7023f25096 [ 1256.121551] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance d6e8f17f-40c4-46e0-a900-d92d1da01ed8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1256.122105] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg b52acac214474cca9bbd43d791d05c1d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1256.129953] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b52acac214474cca9bbd43d791d05c1d [ 1256.130559] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance b6243867-9546-4663-9d48-5c040537490b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1256.130964] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 47371fe096e44c94a2f44ccf56090bb3 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1256.142292] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 47371fe096e44c94a2f44ccf56090bb3 [ 1256.142895] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 4b87e74a-2408-466f-b1c2-68330c31fb9d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1256.143110] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1256.143255] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1256.328123] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba9bb902-cfa4-4096-ab9e-e06582072214 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.335580] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9602ea4-2751-484f-af61-e64f34a4d064 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.365442] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba24b82f-df72-4819-9f3a-4b8d5a0d795c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.372325] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb2c3dab-13c8-4df2-a615-a88a76c424b6 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.384784] env[61649]: DEBUG nova.compute.provider_tree [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1256.385233] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 68c7cba5e4bd4a7c92573eed10054761 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1256.391968] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 68c7cba5e4bd4a7c92573eed10054761 [ 1256.392829] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1256.395010] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 14ee4ccbd3de4b508ba7022ddfcca355 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1256.405582] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 14ee4ccbd3de4b508ba7022ddfcca355 
[ 1256.406195] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61649) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1256.406372] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.392s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1260.839848] env[61649]: WARNING oslo_vmware.rw_handles [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1260.839848] env[61649]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1260.839848] env[61649]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1260.839848] env[61649]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1260.839848] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1260.839848] env[61649]: ERROR oslo_vmware.rw_handles response.begin() [ 1260.839848] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1260.839848] env[61649]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1260.839848] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1260.839848] env[61649]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1260.839848] env[61649]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1260.839848] env[61649]: ERROR oslo_vmware.rw_handles [ 1260.840437] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Downloaded image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to vmware_temp/2b9651ad-3676-45bd-b92d-36fdfbb0b58e/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1260.842125] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Caching image {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1260.842407] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Copying Virtual Disk [datastore1] vmware_temp/2b9651ad-3676-45bd-b92d-36fdfbb0b58e/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk to [datastore1] 
vmware_temp/2b9651ad-3676-45bd-b92d-36fdfbb0b58e/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk {{(pid=61649) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1260.842703] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a51279a6-bdbf-46d4-8740-42c91715bb3f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.850243] env[61649]: DEBUG oslo_vmware.api [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Waiting for the task: (returnval){ [ 1260.850243] env[61649]: value = "task-158210" [ 1260.850243] env[61649]: _type = "Task" [ 1260.850243] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1260.857981] env[61649]: DEBUG oslo_vmware.api [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Task: {'id': task-158210, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.360480] env[61649]: DEBUG oslo_vmware.exceptions [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Fault InvalidArgument not matched. {{(pid=61649) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1261.360718] env[61649]: DEBUG oslo_concurrency.lockutils [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1261.361264] env[61649]: ERROR nova.compute.manager [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1261.361264] env[61649]: Faults: ['InvalidArgument'] [ 1261.361264] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] Traceback (most recent call last): [ 1261.361264] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1261.361264] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] yield resources [ 1261.361264] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1261.361264] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] self.driver.spawn(context, instance, image_meta, [ 1261.361264] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in 
spawn [ 1261.361264] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1261.361264] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1261.361264] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] self._fetch_image_if_missing(context, vi) [ 1261.361264] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1261.361786] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] image_cache(vi, tmp_image_ds_loc) [ 1261.361786] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1261.361786] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] vm_util.copy_virtual_disk( [ 1261.361786] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1261.361786] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] session._wait_for_task(vmdk_copy_task) [ 1261.361786] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1261.361786] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] return self.wait_for_task(task_ref) [ 1261.361786] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1261.361786] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] return evt.wait() [ 1261.361786] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1261.361786] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] result = hub.switch() [ 1261.361786] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1261.361786] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] return self.greenlet.switch() [ 1261.362282] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1261.362282] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] self.f(*self.args, **self.kw) [ 1261.362282] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1261.362282] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] raise exceptions.translate_fault(task_info.error) [ 1261.362282] env[61649]: ERROR nova.compute.manager [instance: 
95426048-d403-4dad-9ad7-b76de655a319] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1261.362282] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] Faults: ['InvalidArgument'] [ 1261.362282] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] [ 1261.362282] env[61649]: INFO nova.compute.manager [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Terminating instance [ 1261.363078] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1261.363280] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1261.363507] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9f9f061d-204f-413b-a7d9-b01b31b8475c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.365538] env[61649]: DEBUG nova.compute.manager [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Start destroying the instance on the hypervisor. 
{{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1261.365730] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1261.366404] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81040db4-a0ba-49b8-8c0f-9cfc6b116661 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.373038] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Unregistering the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1261.373245] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2ba99d4f-8af3-4cfc-9043-b58023b76ab2 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.375196] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1261.375367] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61649) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1261.376297] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-50ee83ff-a5d9-423f-a443-52c7503e2dc4 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.381164] env[61649]: DEBUG oslo_vmware.api [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Waiting for the task: (returnval){ [ 1261.381164] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]522cca02-1785-a72b-6ae2-52eaeef719d1" [ 1261.381164] env[61649]: _type = "Task" [ 1261.381164] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1261.388256] env[61649]: DEBUG oslo_vmware.api [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]522cca02-1785-a72b-6ae2-52eaeef719d1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.438300] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Unregistered the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1261.438509] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Deleting contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1261.438687] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Deleting the datastore file [datastore1] 95426048-d403-4dad-9ad7-b76de655a319 {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1261.438933] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0266f818-d551-4b56-a7c6-ef7b0163f257 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.444959] env[61649]: DEBUG oslo_vmware.api [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Waiting for the task: (returnval){ [ 1261.444959] env[61649]: value = "task-158212" [ 1261.444959] env[61649]: _type = "Task" [ 1261.444959] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1261.451777] env[61649]: DEBUG oslo_vmware.api [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Task: {'id': task-158212, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.891317] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Preparing fetch location {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1261.891605] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Creating directory with path [datastore1] vmware_temp/cf35afd3-62d0-4cb5-8ef2-1cb39a745ca3/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1261.891815] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5cb0ba46-9085-49af-9064-0f305844eb41 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.911675] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Created directory with path [datastore1] vmware_temp/cf35afd3-62d0-4cb5-8ef2-1cb39a745ca3/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1261.911870] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Fetch image to [datastore1] vmware_temp/cf35afd3-62d0-4cb5-8ef2-1cb39a745ca3/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1261.912056] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to [datastore1] vmware_temp/cf35afd3-62d0-4cb5-8ef2-1cb39a745ca3/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1261.912823] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cac151e3-6a5d-47f6-9cdc-f1b8e2b0600d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.919294] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0011f36-e8fe-444f-b499-1aeef41e5446 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.930930] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae1d33c6-2386-4d01-b84b-63134b9e6507 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.964329] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd2d0cc5-7ba5-4eb2-b55e-40a5362d6563 {{(pid=61649) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.971435] env[61649]: DEBUG oslo_vmware.api [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Task: {'id': task-158212, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.082113} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1261.972897] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Deleted the datastore file {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1261.973089] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Deleted contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1261.973262] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1261.973461] env[61649]: INFO nova.compute.manager [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1261.975212] env[61649]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-efbd98fb-4da5-4af1-877e-7f1acd67b25c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.977254] env[61649]: DEBUG nova.compute.claims [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Aborting claim: {{(pid=61649) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1261.977423] env[61649]: DEBUG oslo_concurrency.lockutils [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1261.977643] env[61649]: DEBUG oslo_concurrency.lockutils [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1261.979426] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Expecting reply to msg 2b503452e1b84034aa67c3396293b617 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1261.998561] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1262.012267] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2b503452e1b84034aa67c3396293b617 [ 1262.153056] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1262.154560] env[61649]: ERROR nova.compute.manager [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11. 
[ 1262.154560] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Traceback (most recent call last): [ 1262.154560] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1262.154560] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1262.154560] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1262.154560] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] result = getattr(controller, method)(*args, **kwargs) [ 1262.154560] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1262.154560] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] return self._get(image_id) [ 1262.154560] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1262.154560] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1262.154560] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1262.154859] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] resp, body = self.http_client.get(url, headers=header) [ 1262.154859] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1262.154859] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] return self.request(url, 'GET', **kwargs) [ 1262.154859] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1262.154859] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] return self._handle_response(resp) [ 1262.154859] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1262.154859] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] raise exc.from_response(resp, resp.content) [ 1262.154859] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1262.154859] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] [ 1262.154859] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] During handling of the above exception, another exception occurred: [ 1262.154859] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] [ 1262.154859] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Traceback (most recent call last): [ 1262.155172] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1262.155172] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] yield resources [ 1262.155172] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1262.155172] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] self.driver.spawn(context, instance, image_meta, [ 1262.155172] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1262.155172] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1262.155172] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1262.155172] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] self._fetch_image_if_missing(context, vi) [ 1262.155172] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1262.155172] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] image_fetch(context, vi, tmp_image_ds_loc) [ 1262.155172] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1262.155172] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] images.fetch_image( [ 1262.155172] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1262.155508] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] metadata = IMAGE_API.get(context, image_ref) [ 1262.155508] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1262.155508] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] return session.show(context, image_id, [ 1262.155508] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1262.155508] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] _reraise_translated_image_exception(image_id) [ 1262.155508] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File 
"/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1262.155508] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] raise new_exc.with_traceback(exc_trace) [ 1262.155508] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1262.155508] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1262.155508] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1262.155508] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] result = getattr(controller, method)(*args, **kwargs) [ 1262.155508] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1262.155508] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] return self._get(image_id) [ 1262.155832] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1262.155832] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1262.155832] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1262.155832] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] resp, body = self.http_client.get(url, headers=header) [ 1262.155832] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1262.155832] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] return self.request(url, 'GET', **kwargs) [ 1262.155832] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1262.155832] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] return self._handle_response(resp) [ 1262.155832] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1262.155832] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] raise exc.from_response(resp, resp.content) [ 1262.155832] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] nova.exception.ImageNotAuthorized: Not authorized for image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11. 
[ 1262.155832] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] [ 1262.156159] env[61649]: INFO nova.compute.manager [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Terminating instance [ 1262.156368] env[61649]: DEBUG oslo_concurrency.lockutils [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1262.156584] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1262.157431] env[61649]: DEBUG nova.compute.manager [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1262.157689] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1262.157930] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c6a8a8ce-277b-4bdd-bee8-d5c653b1d70a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.161051] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cabfafb2-80ae-48ef-be34-78c34c9d0fdd {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.170355] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Unregistering the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1262.171329] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-23faf412-4cfd-46f0-a695-bfdd2ddff265 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.172735] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1262.172909] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 
tempest-AttachInterfacesTestJSON-1871132450-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61649) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1262.173566] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8df6a4c5-7c4f-4dc5-b558-651dc474e76e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.181816] env[61649]: DEBUG oslo_vmware.api [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Waiting for the task: (returnval){ [ 1262.181816] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52ad5bb7-ea51-6a39-af18-f340d187e514" [ 1262.181816] env[61649]: _type = "Task" [ 1262.181816] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1262.187214] env[61649]: DEBUG oslo_vmware.api [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52ad5bb7-ea51-6a39-af18-f340d187e514, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1262.212225] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b56c1178-db7b-4590-af1a-fc8cd9ad7f28 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.218764] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52a72456-f474-42a0-bdf4-41360e5017f8 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.250293] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07d3d2f1-1edd-4990-a618-3f3258c34ea5 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.257188] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faeb3607-ed57-4919-8c74-9aa677556dcc {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.270011] env[61649]: DEBUG nova.compute.provider_tree [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1262.270546] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Expecting reply to msg 6b02e9e61ec34dc4bd5326fba74a809a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1262.277569] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6b02e9e61ec34dc4bd5326fba74a809a [ 1262.278469] env[61649]: DEBUG nova.scheduler.client.report [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 
tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1262.280743] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Expecting reply to msg bd878d3a7fc74c6eaa9fda091eebdb09 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1262.294926] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bd878d3a7fc74c6eaa9fda091eebdb09 [ 1262.295673] env[61649]: DEBUG oslo_concurrency.lockutils [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.318s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1262.296197] env[61649]: ERROR nova.compute.manager [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1262.296197] env[61649]: Faults: ['InvalidArgument'] [ 1262.296197] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] Traceback (most recent call last): [ 1262.296197] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1262.296197] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] self.driver.spawn(context, instance, image_meta, [ 1262.296197] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1262.296197] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1262.296197] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1262.296197] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] self._fetch_image_if_missing(context, vi) [ 1262.296197] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1262.296197] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] image_cache(vi, tmp_image_ds_loc) [ 1262.296197] env[61649]: ERROR nova.compute.manager [instance: 
95426048-d403-4dad-9ad7-b76de655a319] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1262.296546] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] vm_util.copy_virtual_disk( [ 1262.296546] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1262.296546] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] session._wait_for_task(vmdk_copy_task) [ 1262.296546] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1262.296546] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] return self.wait_for_task(task_ref) [ 1262.296546] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1262.296546] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] return evt.wait() [ 1262.296546] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1262.296546] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] result = hub.switch() [ 1262.296546] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1262.296546] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] return self.greenlet.switch() [ 1262.296546] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1262.296546] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] self.f(*self.args, **self.kw) [ 1262.296877] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1262.296877] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] raise exceptions.translate_fault(task_info.error) [ 1262.296877] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1262.296877] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] Faults: ['InvalidArgument'] [ 1262.296877] env[61649]: ERROR nova.compute.manager [instance: 95426048-d403-4dad-9ad7-b76de655a319] [ 1262.297012] env[61649]: DEBUG nova.compute.utils [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] [instance: 95426048-d403-4dad-9ad7-b76de655a319] VimFaultException {{(pid=61649) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1262.298349] env[61649]: DEBUG nova.compute.manager [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 
tempest-ServerRescueTestJSONUnderV235-579567353-project-member] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Build of instance 95426048-d403-4dad-9ad7-b76de655a319 was re-scheduled: A specified parameter was not correct: fileType [ 1262.298349] env[61649]: Faults: ['InvalidArgument'] {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1262.298719] env[61649]: DEBUG nova.compute.manager [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Unplugging VIFs for instance {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1262.298886] env[61649]: DEBUG nova.compute.manager [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1262.299050] env[61649]: DEBUG nova.compute.manager [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1262.299207] env[61649]: DEBUG nova.network.neutron [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] [instance: 95426048-d403-4dad-9ad7-b76de655a319] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1262.545913] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Expecting reply to msg a3b729bb17c54ebd969b94280b6806a7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1262.556525] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a3b729bb17c54ebd969b94280b6806a7 [ 1262.557024] env[61649]: DEBUG nova.network.neutron [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1262.557526] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Expecting reply to msg 97f59aac9079408ab820985e26cb285c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1262.566420] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 97f59aac9079408ab820985e26cb285c [ 1262.566997] env[61649]: INFO nova.compute.manager [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Took 0.27 seconds to deallocate 
network for instance. [ 1262.568685] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Expecting reply to msg bb354b16db6f4a7aa7b3db56b117d2b2 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1262.601486] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bb354b16db6f4a7aa7b3db56b117d2b2 [ 1262.604141] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Expecting reply to msg ecd07dfb365b4311adf5f652d3acd0b3 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1262.638086] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ecd07dfb365b4311adf5f652d3acd0b3 [ 1262.661476] env[61649]: INFO nova.scheduler.client.report [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Deleted allocations for instance 95426048-d403-4dad-9ad7-b76de655a319 [ 1262.667513] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Expecting reply to msg 79ea7171cf8840c7b7d78e62f4512362 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1262.679028] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 79ea7171cf8840c7b7d78e62f4512362 [ 1262.679671] env[61649]: DEBUG oslo_concurrency.lockutils [None req-465eb2cc-4950-483b-b6f0-ac0ba1e02bb3 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Lock "95426048-d403-4dad-9ad7-b76de655a319" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 673.839s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1262.680241] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-031c8a30-8f5f-4862-8a61-ea1c8cd9187e tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Expecting reply to msg 31fb44c8a60049838f03fdc7200b5064 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1262.681072] env[61649]: DEBUG oslo_concurrency.lockutils [None req-0191f4c5-2b2e-4b25-984b-159db45541a6 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Lock "95426048-d403-4dad-9ad7-b76de655a319" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 476.866s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1262.681307] env[61649]: DEBUG oslo_concurrency.lockutils [None req-0191f4c5-2b2e-4b25-984b-159db45541a6 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Acquiring lock "95426048-d403-4dad-9ad7-b76de655a319-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1262.681524] env[61649]: DEBUG oslo_concurrency.lockutils [None 
req-0191f4c5-2b2e-4b25-984b-159db45541a6 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Lock "95426048-d403-4dad-9ad7-b76de655a319-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1262.681691] env[61649]: DEBUG oslo_concurrency.lockutils [None req-0191f4c5-2b2e-4b25-984b-159db45541a6 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Lock "95426048-d403-4dad-9ad7-b76de655a319-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1262.683486] env[61649]: INFO nova.compute.manager [None req-0191f4c5-2b2e-4b25-984b-159db45541a6 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Terminating instance [ 1262.685148] env[61649]: DEBUG nova.compute.manager [None req-0191f4c5-2b2e-4b25-984b-159db45541a6 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1262.685328] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-0191f4c5-2b2e-4b25-984b-159db45541a6 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1262.686019] env[61649]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b108f864-9e12-4865-b4dd-be9a891e03ea {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.698844] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc7bc667-0ef8-4547-9557-4b5d10422943 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.709656] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 31fb44c8a60049838f03fdc7200b5064 [ 1262.711734] env[61649]: DEBUG nova.compute.manager [None req-031c8a30-8f5f-4862-8a61-ea1c8cd9187e tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] [instance: 71c15bd8-5786-4d44-aa0e-3249b272ac72] Starting instance... 
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1262.713622] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-031c8a30-8f5f-4862-8a61-ea1c8cd9187e tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Expecting reply to msg df50476ded2a4907b331b06a0c15062a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1262.720672] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Preparing fetch location {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1262.720926] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Creating directory with path [datastore1] vmware_temp/2010d08e-a4d6-416c-ad08-427d5f61b8b3/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1262.721170] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bed0334c-d285-4009-84f1-cf43c3604fd2 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.731919] env[61649]: WARNING nova.virt.vmwareapi.vmops [None req-0191f4c5-2b2e-4b25-984b-159db45541a6 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 95426048-d403-4dad-9ad7-b76de655a319 could not be found. [ 1262.732150] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-0191f4c5-2b2e-4b25-984b-159db45541a6 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1262.732340] env[61649]: INFO nova.compute.manager [None req-0191f4c5-2b2e-4b25-984b-159db45541a6 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1262.732593] env[61649]: DEBUG oslo.service.loopingcall [None req-0191f4c5-2b2e-4b25-984b-159db45541a6 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1262.732826] env[61649]: DEBUG nova.compute.manager [-] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1262.732922] env[61649]: DEBUG nova.network.neutron [-] [instance: 95426048-d403-4dad-9ad7-b76de655a319] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1262.741375] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Created directory with path [datastore1] vmware_temp/2010d08e-a4d6-416c-ad08-427d5f61b8b3/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1262.741679] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Fetch image to [datastore1] vmware_temp/2010d08e-a4d6-416c-ad08-427d5f61b8b3/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1262.741996] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to [datastore1] vmware_temp/2010d08e-a4d6-416c-ad08-427d5f61b8b3/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1262.742661] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab55f92c-ac70-485e-b619-77559e8c53e5 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.749946] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48dbe430-778a-4a91-a682-bb5ff650d1fc {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.755192] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg df50476ded2a4907b331b06a0c15062a [ 1262.755778] env[61649]: DEBUG nova.compute.manager [None req-031c8a30-8f5f-4862-8a61-ea1c8cd9187e tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] [instance: 71c15bd8-5786-4d44-aa0e-3249b272ac72] Instance disappeared before build. 
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1262.756137] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-031c8a30-8f5f-4862-8a61-ea1c8cd9187e tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Expecting reply to msg b5985fd497fe4b9bbe8ae2d170e625e4 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1262.764056] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f0de8e3-a192-459d-bc6d-4a573c7fe364 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.768482] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b5985fd497fe4b9bbe8ae2d170e625e4 [ 1262.797924] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg cde84449ba9340b29696c503b1ecc6c4 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1262.799556] env[61649]: DEBUG oslo_concurrency.lockutils [None req-031c8a30-8f5f-4862-8a61-ea1c8cd9187e tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Lock "71c15bd8-5786-4d44-aa0e-3249b272ac72" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 232.376s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1262.800272] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7c70475-4342-4367-8ac3-359985b6f0b6 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.802992] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-f8a9317c-5105-4093-9fbf-2cc64d022fbf tempest-ServerRescueTestJSON-1699452881 tempest-ServerRescueTestJSON-1699452881-project-member] Expecting reply to msg 77424245afca4bc982ecd27ab881aaaf in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1262.805032] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cde84449ba9340b29696c503b1ecc6c4 [ 1262.805350] env[61649]: DEBUG nova.network.neutron [-] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1262.805748] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg faf1a989c7dc4f7fb661f4d17c225519 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1262.808214] env[61649]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-334b89ea-00f8-4118-bad3-bdee261c84f5 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.811748] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 77424245afca4bc982ecd27ab881aaaf [ 1262.812391] env[61649]: DEBUG nova.compute.manager [None req-f8a9317c-5105-4093-9fbf-2cc64d022fbf tempest-ServerRescueTestJSON-1699452881 tempest-ServerRescueTestJSON-1699452881-project-member] [instance: d7adbc9f-af82-4f99-8536-4411665e3233] Starting instance... 
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1262.813950] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-f8a9317c-5105-4093-9fbf-2cc64d022fbf tempest-ServerRescueTestJSON-1699452881 tempest-ServerRescueTestJSON-1699452881-project-member] Expecting reply to msg 9d4cc9e36df3446587dd697b7b866398 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1262.814962] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg faf1a989c7dc4f7fb661f4d17c225519 [ 1262.815389] env[61649]: INFO nova.compute.manager [-] [instance: 95426048-d403-4dad-9ad7-b76de655a319] Took 0.08 seconds to deallocate network for instance. [ 1262.818687] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-0191f4c5-2b2e-4b25-984b-159db45541a6 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Expecting reply to msg 82871ec43fda4ff096eac5dea142f317 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1262.837061] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1262.840236] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9d4cc9e36df3446587dd697b7b866398 [ 1262.840975] env[61649]: DEBUG nova.compute.manager [None req-f8a9317c-5105-4093-9fbf-2cc64d022fbf tempest-ServerRescueTestJSON-1699452881 tempest-ServerRescueTestJSON-1699452881-project-member] [instance: d7adbc9f-af82-4f99-8536-4411665e3233] Instance disappeared before build. 
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1262.841313] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-f8a9317c-5105-4093-9fbf-2cc64d022fbf tempest-ServerRescueTestJSON-1699452881 tempest-ServerRescueTestJSON-1699452881-project-member] Expecting reply to msg af2a11826b7e4ce8bfa8d0485830bfd9 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1262.843388] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 82871ec43fda4ff096eac5dea142f317 [ 1262.850968] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg af2a11826b7e4ce8bfa8d0485830bfd9 [ 1262.858911] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-0191f4c5-2b2e-4b25-984b-159db45541a6 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Expecting reply to msg 82dd2f4a895347ba84cc9051d7de8d5e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1262.864024] env[61649]: DEBUG oslo_concurrency.lockutils [None req-f8a9317c-5105-4093-9fbf-2cc64d022fbf tempest-ServerRescueTestJSON-1699452881 tempest-ServerRescueTestJSON-1699452881-project-member] Lock "d7adbc9f-af82-4f99-8536-4411665e3233" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 230.166s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1262.865040] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4f3872f5-ec64-4477-a9da-4beaee7ecda7 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg 2bb53015b1eb4a1889a47e003e2e6514 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1262.873484] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2bb53015b1eb4a1889a47e003e2e6514 [ 1262.873947] env[61649]: DEBUG nova.compute.manager [None req-4f3872f5-ec64-4477-a9da-4beaee7ecda7 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: c7a35269-b314-4381-a8b4-d509d5627861] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1262.875641] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4f3872f5-ec64-4477-a9da-4beaee7ecda7 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg d6280174db1e47c9b06bba43dcd535ef in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1262.894024] env[61649]: DEBUG oslo_vmware.rw_handles [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2010d08e-a4d6-416c-ad08-427d5f61b8b3/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61649) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1262.955407] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d6280174db1e47c9b06bba43dcd535ef [ 1262.955956] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 82dd2f4a895347ba84cc9051d7de8d5e [ 1262.956591] env[61649]: DEBUG nova.compute.manager [None req-4f3872f5-ec64-4477-a9da-4beaee7ecda7 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: c7a35269-b314-4381-a8b4-d509d5627861] Instance disappeared before build. {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1262.956962] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4f3872f5-ec64-4477-a9da-4beaee7ecda7 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg b9f0665b88824dff8cfd0b92d9c1740d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1262.961844] env[61649]: DEBUG oslo_concurrency.lockutils [None req-0191f4c5-2b2e-4b25-984b-159db45541a6 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Lock "95426048-d403-4dad-9ad7-b76de655a319" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.281s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1262.962150] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-0191f4c5-2b2e-4b25-984b-159db45541a6 tempest-ServerRescueTestJSONUnderV235-579567353 tempest-ServerRescueTestJSONUnderV235-579567353-project-member] Expecting reply to msg f23bbafeb18c47e280d5bb5ff3e02542 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1262.962884] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "95426048-d403-4dad-9ad7-b76de655a319" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 134.936s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1262.963083] env[61649]: INFO nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 95426048-d403-4dad-9ad7-b76de655a319] During sync_power_state the instance has a pending task (deleting). Skip. [ 1262.963258] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "95426048-d403-4dad-9ad7-b76de655a319" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1262.963719] env[61649]: DEBUG oslo_vmware.rw_handles [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Completed reading data from the image iterator. 
{{(pid=61649) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1262.963901] env[61649]: DEBUG oslo_vmware.rw_handles [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2010d08e-a4d6-416c-ad08-427d5f61b8b3/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1262.968033] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b9f0665b88824dff8cfd0b92d9c1740d [ 1262.976315] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f23bbafeb18c47e280d5bb5ff3e02542 [ 1262.980019] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4f3872f5-ec64-4477-a9da-4beaee7ecda7 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Lock "c7a35269-b314-4381-a8b4-d509d5627861" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 222.876s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1262.980817] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Expecting reply to msg 36ca6e6718aa4c96a202f459fef679a5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1262.991176] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 36ca6e6718aa4c96a202f459fef679a5 [ 1262.991605] env[61649]: DEBUG nova.compute.manager [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Starting instance... 
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1262.993268] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Expecting reply to msg 97f574c94b1a4fffbaa43f397621917e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1263.023461] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 97f574c94b1a4fffbaa43f397621917e [ 1263.039029] env[61649]: DEBUG oslo_concurrency.lockutils [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1263.039277] env[61649]: DEBUG oslo_concurrency.lockutils [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1263.040867] env[61649]: INFO nova.compute.claims [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1263.042540] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Expecting reply to msg a10b2cc545084af291fee313fb20032d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1263.078737] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a10b2cc545084af291fee313fb20032d [ 1263.079621] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Expecting reply to msg 2f2d308f5bf9494cbaa2e84574470c74 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1263.090008] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2f2d308f5bf9494cbaa2e84574470c74 [ 1263.268228] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd29c95a-1fb7-4f3c-b6cc-fc02dcc52d04 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.275478] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8653732e-f328-4a30-a9d2-fcfb7eb685bc {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.305356] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e9623d8-6ee0-4287-889d-5af1ca65b053 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.312103] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-e00f4aff-2a6d-4a96-90b3-3df5ed477193 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.324409] env[61649]: DEBUG nova.compute.provider_tree [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1263.324890] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Expecting reply to msg 01bd018adc1640378fe8e7f9dfb6a122 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1263.331535] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 01bd018adc1640378fe8e7f9dfb6a122 [ 1263.332511] env[61649]: DEBUG nova.scheduler.client.report [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1263.334688] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Expecting reply to msg 1521f3258cf64c128faa00e35bb10dcb in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1263.349803] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1521f3258cf64c128faa00e35bb10dcb [ 1263.350564] env[61649]: DEBUG oslo_concurrency.lockutils [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.311s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1263.351055] env[61649]: DEBUG nova.compute.manager [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Start building networks asynchronously for instance. 
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1263.352752] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Expecting reply to msg 8a48fe1e9282465d8c839ab3d01cf518 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1263.386980] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8a48fe1e9282465d8c839ab3d01cf518 [ 1263.388111] env[61649]: DEBUG nova.compute.utils [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Using /dev/sd instead of None {{(pid=61649) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1263.388704] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Expecting reply to msg 7eaff078c1274f24aa61d1aa54639069 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1263.389558] env[61649]: DEBUG nova.compute.manager [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Not allocating networking since 'none' was specified. {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1968}} [ 1263.398014] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7eaff078c1274f24aa61d1aa54639069 [ 1263.398503] env[61649]: DEBUG nova.compute.manager [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Start building block device mappings for instance. {{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1263.400109] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Expecting reply to msg 8ea3f13f9cc14c77a03e352938631937 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1263.427195] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8ea3f13f9cc14c77a03e352938631937 [ 1263.430116] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Expecting reply to msg 4bcb516a7ad242fa99b4deced9ca6754 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1263.462933] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4bcb516a7ad242fa99b4deced9ca6754 [ 1263.464160] env[61649]: DEBUG nova.compute.manager [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Start spawning the instance on the hypervisor. 
{{(pid=61649) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1263.501099] env[61649]: DEBUG nova.virt.hardware [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-04-02T10:03:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-04-02T10:03:42Z,direct_url=,disk_format='vmdk',id=d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d9c1bd4c77004c3cb8e42232cad1896c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-04-02T10:03:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1263.501349] env[61649]: DEBUG nova.virt.hardware [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Flavor limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1263.501510] env[61649]: DEBUG nova.virt.hardware [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Image limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1263.501716] env[61649]: DEBUG nova.virt.hardware [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Flavor pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1263.501875] env[61649]: DEBUG nova.virt.hardware [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Image pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1263.502023] env[61649]: DEBUG nova.virt.hardware [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1263.502233] env[61649]: DEBUG nova.virt.hardware [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1263.502495] env[61649]: DEBUG nova.virt.hardware [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1263.502557] env[61649]: DEBUG nova.virt.hardware [None 
req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Got 1 possible topologies {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1263.502718] env[61649]: DEBUG nova.virt.hardware [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1263.502890] env[61649]: DEBUG nova.virt.hardware [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1263.504111] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5844a29d-ad65-4cce-a3d9-d318c989f549 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.511849] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3b68fac-8470-448e-ba5d-bf874527f8e6 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.526495] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Instance VIF info [] {{(pid=61649) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1263.531898] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Creating folder: Project (fc46ac03215f4d80bb06f34fcbae6b84). Parent ref: group-v51588. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1263.532191] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fd5c6f82-390a-472b-9f30-6ae0c1d3d7f9 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.543289] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Created folder: Project (fc46ac03215f4d80bb06f34fcbae6b84) in parent group-v51588. [ 1263.543289] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Creating folder: Instances. Parent ref: group-v51664. 
{{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1263.543289] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cda83c37-9ed9-4660-890c-a11ebaa1988a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.551295] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Created folder: Instances in parent group-v51664. [ 1263.551529] env[61649]: DEBUG oslo.service.loopingcall [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1263.551713] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Creating VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1263.551905] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f0102d89-5f79-4f3a-8bfb-2af88071a8a3 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.567484] env[61649]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1263.567484] env[61649]: value = "task-158216" [ 1263.567484] env[61649]: _type = "Task" [ 1263.567484] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1263.576122] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158216, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1263.879657] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Unregistered the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1263.879880] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Deleting contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1263.880068] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Deleting the datastore file [datastore1] a0db1e96-4ca4-4fed-b86b-d8457f3570a9 {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1263.880353] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9540f0e1-6ec5-4df0-b0ef-9c1678b437e1 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.886911] env[61649]: DEBUG oslo_vmware.api [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Waiting for the task: (returnval){ [ 1263.886911] env[61649]: value = "task-158217" [ 1263.886911] env[61649]: _type = "Task" [ 1263.886911] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1263.895030] env[61649]: DEBUG oslo_vmware.api [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Task: {'id': task-158217, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1264.078549] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158216, 'name': CreateVM_Task, 'duration_secs': 0.26342} completed successfully. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1264.078812] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Created VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1264.079170] env[61649]: DEBUG oslo_concurrency.lockutils [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1264.079363] env[61649]: DEBUG oslo_concurrency.lockutils [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1264.079688] env[61649]: DEBUG oslo_concurrency.lockutils [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1264.079954] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eab2739e-3af8-4028-b2a0-be6f956beb33 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.086993] env[61649]: DEBUG oslo_vmware.api [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Waiting for the task: (returnval){ [ 1264.086993] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]5296b5f0-fc76-0b43-72a9-5cbe84a588e8" [ 1264.086993] env[61649]: _type = "Task" [ 1264.086993] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1264.094808] env[61649]: DEBUG oslo_vmware.api [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]5296b5f0-fc76-0b43-72a9-5cbe84a588e8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1264.398082] env[61649]: DEBUG oslo_vmware.api [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Task: {'id': task-158217, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.079011} completed successfully. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1264.398363] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Deleted the datastore file {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1264.398552] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Deleted contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1264.398727] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1264.398902] env[61649]: INFO nova.compute.manager [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Took 2.24 seconds to destroy the instance on the hypervisor. [ 1264.401132] env[61649]: DEBUG nova.compute.claims [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Aborting claim: {{(pid=61649) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1264.401306] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1264.401520] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1264.403933] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Expecting reply to msg 78a3828421b0437a9a7d4aa217230937 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1264.435967] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 78a3828421b0437a9a7d4aa217230937 [ 1264.598890] env[61649]: DEBUG oslo_concurrency.lockutils [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1264.599148] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None 
req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Processing image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1264.599415] env[61649]: DEBUG oslo_concurrency.lockutils [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1264.619666] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b69547a0-1b44-461e-9e0b-4b33aabf91e5 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.626667] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e3db14d-fd89-4b5c-8aff-5857b03d7e7c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.656735] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d76aafcf-bd13-4169-812b-8de40762b856 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.663887] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cb0ebe2-1623-4b2b-8c44-5e46cd02aebd {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.676346] env[61649]: DEBUG nova.compute.provider_tree [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1264.676841] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Expecting reply to msg df19bc95f51a48ce976fda7667a1d0b2 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1264.684359] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg df19bc95f51a48ce976fda7667a1d0b2 [ 1264.685264] env[61649]: DEBUG nova.scheduler.client.report [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1264.687468] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None 
req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Expecting reply to msg 9f57274f98184ac5ace66985a0b7b858 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1264.719938] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9f57274f98184ac5ace66985a0b7b858 [ 1264.720116] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.318s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1264.720877] env[61649]: ERROR nova.compute.manager [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11. [ 1264.720877] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Traceback (most recent call last): [ 1264.720877] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1264.720877] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1264.720877] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1264.720877] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] result = getattr(controller, method)(*args, **kwargs) [ 1264.720877] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1264.720877] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] return self._get(image_id) [ 1264.720877] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1264.720877] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1264.720877] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1264.721256] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] resp, body = self.http_client.get(url, headers=header) [ 1264.721256] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1264.721256] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] return self.request(url, 'GET', **kwargs) [ 1264.721256] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1264.721256] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] return self._handle_response(resp) [ 1264.721256] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1264.721256] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] raise exc.from_response(resp, resp.content) [ 1264.721256] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. [ 1264.721256] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] [ 1264.721256] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] During handling of the above exception, another exception occurred: [ 1264.721256] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] [ 1264.721256] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Traceback (most recent call last): [ 1264.721569] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1264.721569] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] self.driver.spawn(context, instance, image_meta, [ 1264.721569] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1264.721569] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1264.721569] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1264.721569] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] self._fetch_image_if_missing(context, vi) [ 1264.721569] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1264.721569] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] image_fetch(context, vi, tmp_image_ds_loc) [ 1264.721569] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1264.721569] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] images.fetch_image( [ 1264.721569] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1264.721569] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] metadata = IMAGE_API.get(context, image_ref) [ 1264.721569] env[61649]: ERROR nova.compute.manager 
[instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1264.721919] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] return session.show(context, image_id, [ 1264.721919] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1264.721919] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] _reraise_translated_image_exception(image_id) [ 1264.721919] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1264.721919] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] raise new_exc.with_traceback(exc_trace) [ 1264.721919] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1264.721919] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1264.721919] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1264.721919] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] result = getattr(controller, method)(*args, **kwargs) [ 1264.721919] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1264.721919] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] return self._get(image_id) [ 1264.721919] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1264.721919] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1264.722238] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1264.722238] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] resp, body = self.http_client.get(url, headers=header) [ 1264.722238] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1264.722238] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] return self.request(url, 'GET', **kwargs) [ 1264.722238] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1264.722238] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] return self._handle_response(resp) [ 1264.722238] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1264.722238] 
env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] raise exc.from_response(resp, resp.content) [ 1264.722238] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] nova.exception.ImageNotAuthorized: Not authorized for image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11. [ 1264.722238] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] [ 1264.722492] env[61649]: DEBUG nova.compute.utils [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Not authorized for image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11. {{(pid=61649) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1264.723181] env[61649]: DEBUG nova.compute.manager [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Build of instance a0db1e96-4ca4-4fed-b86b-d8457f3570a9 was re-scheduled: Not authorized for image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11. {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1264.723639] env[61649]: DEBUG nova.compute.manager [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Unplugging VIFs for instance {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1264.723816] env[61649]: DEBUG nova.compute.manager [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1264.723972] env[61649]: DEBUG nova.compute.manager [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1264.724166] env[61649]: DEBUG nova.network.neutron [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1264.878190] env[61649]: DEBUG neutronclient.v2_0.client [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61649) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1264.879484] env[61649]: ERROR nova.compute.manager [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. 
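Note: the traceback above and the one that follows below are the same 401 surfacing at two layers. In the frames above, glanceclient raises HTTPUnauthorized and nova/image/glance.py re-raises it as nova.exception.ImageNotAuthorized while preserving the inner traceback (the `raise new_exc.with_traceback(exc_trace)` at glance.py:1032). A minimal, self-contained sketch of that translation idiom follows; the exception classes are stand-ins, not the real nova/glanceclient types:

    import sys

    class HTTPUnauthorized(Exception):
        """Stand-in for glanceclient.exc.HTTPUnauthorized."""

    class ImageNotAuthorized(Exception):
        """Stand-in for nova.exception.ImageNotAuthorized."""

    def show(image_id):
        try:
            # the glance GET that comes back 401 in this log
            raise HTTPUnauthorized("HTTP 401 Unauthorized")
        except HTTPUnauthorized:
            exc_trace = sys.exc_info()[2]
            # translate to nova's exception type but keep the original
            # traceback -- the with_traceback() idiom from the frames above
            new_exc = ImageNotAuthorized(
                "Not authorized for image %s." % image_id)
            raise new_exc.with_traceback(exc_trace)

Calling show() reproduces the shape of the log output: the HTTPUnauthorized section first, then "During handling of the above exception, another exception occurred:", then ImageNotAuthorized.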
[ 1264.879484] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Traceback (most recent call last): [ 1264.879484] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1264.879484] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1264.879484] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1264.879484] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] result = getattr(controller, method)(*args, **kwargs) [ 1264.879484] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1264.879484] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] return self._get(image_id) [ 1264.879484] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1264.879484] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1264.879484] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1264.879830] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] resp, body = self.http_client.get(url, headers=header) [ 1264.879830] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1264.879830] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] return self.request(url, 'GET', **kwargs) [ 1264.879830] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1264.879830] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] return self._handle_response(resp) [ 1264.879830] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1264.879830] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] raise exc.from_response(resp, resp.content) [ 1264.879830] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1264.879830] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] [ 1264.879830] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] During handling of the above exception, another exception occurred: [ 1264.879830] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] [ 1264.879830] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Traceback (most recent call last): [ 1264.880177] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1264.880177] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] self.driver.spawn(context, instance, image_meta, [ 1264.880177] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1264.880177] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1264.880177] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1264.880177] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] self._fetch_image_if_missing(context, vi) [ 1264.880177] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1264.880177] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] image_fetch(context, vi, tmp_image_ds_loc) [ 1264.880177] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1264.880177] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] images.fetch_image( [ 1264.880177] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1264.880177] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] metadata = IMAGE_API.get(context, image_ref) [ 1264.880177] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1264.880561] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] return session.show(context, image_id, [ 1264.880561] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1264.880561] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] _reraise_translated_image_exception(image_id) [ 1264.880561] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1264.880561] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] raise new_exc.with_traceback(exc_trace) [ 1264.880561] env[61649]: ERROR nova.compute.manager [instance: 
a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1264.880561] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1264.880561] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1264.880561] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] result = getattr(controller, method)(*args, **kwargs) [ 1264.880561] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1264.880561] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] return self._get(image_id) [ 1264.880561] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1264.880561] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1264.880917] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1264.880917] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] resp, body = self.http_client.get(url, headers=header) [ 1264.880917] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1264.880917] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] return self.request(url, 'GET', **kwargs) [ 1264.880917] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1264.880917] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] return self._handle_response(resp) [ 1264.880917] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1264.880917] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] raise exc.from_response(resp, resp.content) [ 1264.880917] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] nova.exception.ImageNotAuthorized: Not authorized for image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11. 
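Note: the chained traceback continues below and climbs one layer. Once spawn fails with ImageNotAuthorized, _build_and_run_instance converts it into RescheduledException (nova/compute/manager.py:2739 in the frames below) so the build can be retried, and _do_build_and_run_instance then runs network cleanup, which is what hits the next 401. A rough control-flow sketch under simplified, stand-in names:

    class ImageNotAuthorized(Exception):
        """Stand-in for nova.exception.ImageNotAuthorized."""

    class RescheduledException(Exception):
        """Stand-in for nova.exception.RescheduledException."""

    def _build_and_run_instance(uuid):
        try:
            # the spawn path that failed above
            raise ImageNotAuthorized("Not authorized for image ...")
        except ImageNotAuthorized as exc:
            # wrap the failure so the build can be rescheduled,
            # as at nova/compute/manager.py:2739
            raise RescheduledException(
                "Build of instance %s was re-scheduled: %s" % (uuid, exc))

    def _cleanup_allocated_networks(uuid):
        # placeholder: the real cleanup deallocates networking, and in
        # this log that call fails with its own Unauthorized
        pass

    def _do_build_and_run_instance(uuid):
        try:
            _build_and_run_instance(uuid)
        except RescheduledException:
            _cleanup_allocated_networks(uuid)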
[ 1264.880917] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] [ 1264.880917] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] During handling of the above exception, another exception occurred: [ 1264.880917] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] [ 1264.880917] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Traceback (most recent call last): [ 1264.881241] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/compute/manager.py", line 2447, in _do_build_and_run_instance [ 1264.881241] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] self._build_and_run_instance(context, instance, image, [ 1264.881241] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/compute/manager.py", line 2739, in _build_and_run_instance [ 1264.881241] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] raise exception.RescheduledException( [ 1264.881241] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] nova.exception.RescheduledException: Build of instance a0db1e96-4ca4-4fed-b86b-d8457f3570a9 was re-scheduled: Not authorized for image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11. [ 1264.881241] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] [ 1264.881241] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] During handling of the above exception, another exception occurred: [ 1264.881241] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] [ 1264.881241] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Traceback (most recent call last): [ 1264.881241] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1264.881241] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] ret = obj(*args, **kwargs) [ 1264.881241] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1264.881241] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] exception_handler_v20(status_code, error_body) [ 1264.881640] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1264.881640] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] raise client_exc(message=error_message, [ 1264.881640] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1264.881640] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Neutron server returns request_ids: ['req-ac53cf64-795b-453f-9f21-f5fdc2795167'] [ 1264.881640] env[61649]: ERROR nova.compute.manager [instance: 
a0db1e96-4ca4-4fed-b86b-d8457f3570a9] [ 1264.881640] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] During handling of the above exception, another exception occurred: [ 1264.881640] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] [ 1264.881640] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Traceback (most recent call last): [ 1264.881640] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/compute/manager.py", line 3036, in _cleanup_allocated_networks [ 1264.881640] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] self._deallocate_network(context, instance, requested_networks) [ 1264.881640] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1264.881640] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] self.network_api.deallocate_for_instance( [ 1264.881640] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/network/neutron.py", line 1805, in deallocate_for_instance [ 1264.881970] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] data = neutron.list_ports(**search_opts) [ 1264.881970] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1264.881970] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] ret = obj(*args, **kwargs) [ 1264.881970] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1264.881970] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] return self.list('ports', self.ports_path, retrieve_all, [ 1264.881970] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1264.881970] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] ret = obj(*args, **kwargs) [ 1264.881970] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1264.881970] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] for r in self._pagination(collection, path, **params): [ 1264.881970] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1264.881970] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] res = self.get(path, params=params) [ 1264.881970] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1264.881970] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] ret = obj(*args, **kwargs) [ 1264.882537] env[61649]: ERROR nova.compute.manager [instance: 
a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1264.882537] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] return self.retry_request("GET", action, body=body, [ 1264.882537] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1264.882537] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] ret = obj(*args, **kwargs) [ 1264.882537] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1264.882537] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] return self.do_request(method, action, body=body, [ 1264.882537] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1264.882537] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] ret = obj(*args, **kwargs) [ 1264.882537] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1264.882537] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] self._handle_fault_response(status_code, replybody, resp) [ 1264.882537] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 1264.882537] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] raise exception.Unauthorized() [ 1264.882537] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] nova.exception.Unauthorized: Not authorized. 
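The chained traceback above is worth reading bottom-up: python-neutronclient raises its own Unauthorized from the 401 response, and Nova's proxy around every neutronclient call (the repeated nova/network/neutron.py wrapper frames) re-raises it as a Nova exception. Together with the earlier "Not authorized for image" reschedule, this is consistent with the request token no longer being valid partway through the run. A minimal sketch of that translation pattern follows; the class and exception names are stand-ins for illustration, not Nova's actual implementation:

    # Sketch of the exception-translation proxy visible in the wrapper
    # frames above. Stand-in names, not Nova's real code.

    class NeutronClientUnauthorized(Exception):
        """Stand-in for neutronclient.common.exceptions.Unauthorized."""

    class NovaUnauthorized(Exception):
        """Stand-in for nova.exception.Unauthorized."""

    class NovaNeutronAdminCredentialConfigurationInvalid(Exception):
        """Stand-in for nova.exception.NeutronAdminCredentialConfigurationInvalid."""

    class ClientProxy:
        """Wrap a neutron client; convert client exceptions to Nova ones."""

        def __init__(self, client, admin):
            self._client = client
            self._admin = admin  # True if built from nova.conf service credentials

        def __getattr__(self, name):
            attr = getattr(self._client, name)
            if not callable(attr):
                return attr

            def wrapper(*args, **kwargs):
                try:
                    return attr(*args, **kwargs)
                except NeutronClientUnauthorized:
                    if self._admin:
                        # The deployment's own service token was rejected:
                        # a configuration problem, not a user error.
                        raise NovaNeutronAdminCredentialConfigurationInvalid()
                    raise NovaUnauthorized()

            return wrapper

The same 401 therefore surfaces as two different Nova exceptions later in this log, depending on whether the failing call used the request's user token or the admin credentials from nova.conf.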
[ 1264.882820] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] [ 1264.882820] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Expecting reply to msg 718170f545fd459ba82801f9deb39f1d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1264.911864] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 718170f545fd459ba82801f9deb39f1d [ 1264.931351] env[61649]: INFO nova.scheduler.client.report [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Deleted allocations for instance a0db1e96-4ca4-4fed-b86b-d8457f3570a9 [ 1264.937984] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Expecting reply to msg dabdc3f9e2894005a78c3cf0798f86e7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1264.947057] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dabdc3f9e2894005a78c3cf0798f86e7 [ 1264.947728] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4790e9b7-467d-4a60-980b-7d1369f68d8f tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Lock "a0db1e96-4ca4-4fed-b86b-d8457f3570a9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 644.008s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1264.948261] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg f71fb5e156614acba69ddadff6a8eabb in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1264.948954] env[61649]: DEBUG oslo_concurrency.lockutils [None req-ca90aa73-eb06-4689-8ec0-988713ff447b tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Lock "a0db1e96-4ca4-4fed-b86b-d8457f3570a9" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 445.141s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1264.949171] env[61649]: DEBUG oslo_concurrency.lockutils [None req-ca90aa73-eb06-4689-8ec0-988713ff447b tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Acquiring lock "a0db1e96-4ca4-4fed-b86b-d8457f3570a9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1264.949412] env[61649]: DEBUG oslo_concurrency.lockutils [None req-ca90aa73-eb06-4689-8ec0-988713ff447b tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Lock "a0db1e96-4ca4-4fed-b86b-d8457f3570a9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1264.949589] env[61649]: DEBUG oslo_concurrency.lockutils [None 
req-ca90aa73-eb06-4689-8ec0-988713ff447b tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Lock "a0db1e96-4ca4-4fed-b86b-d8457f3570a9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1264.951456] env[61649]: INFO nova.compute.manager [None req-ca90aa73-eb06-4689-8ec0-988713ff447b tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Terminating instance [ 1264.953625] env[61649]: DEBUG nova.compute.manager [None req-ca90aa73-eb06-4689-8ec0-988713ff447b tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1264.953826] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-ca90aa73-eb06-4689-8ec0-988713ff447b tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1264.954278] env[61649]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c994ba67-bbbf-4da8-9d02-760810d4c93a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.964054] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24b49a7d-a6aa-43f3-b96e-e90730aad9fc {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.974770] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f71fb5e156614acba69ddadff6a8eabb [ 1264.975249] env[61649]: DEBUG nova.compute.manager [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1264.977086] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg 52aa20b76fa143a5836dcb57d02d9c37 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1264.994438] env[61649]: WARNING nova.virt.vmwareapi.vmops [None req-ca90aa73-eb06-4689-8ec0-988713ff447b tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a0db1e96-4ca4-4fed-b86b-d8457f3570a9 could not be found. 
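The WARNING just above is benign by design: the build had already failed and been rescheduled away, so the VM never existed in vCenter, and the driver treats "not found" as "already destroyed" so that delete stays idempotent. Roughly the following pattern, with hypothetical helper names standing in for the real vmops/vm_util code:

    # Idempotent-destroy pattern suggested by the log above: a missing
    # backend VM is logged and skipped, not treated as a failure.
    import logging

    LOG = logging.getLogger(__name__)

    class InstanceNotFound(Exception):
        """Stand-in for nova.exception.InstanceNotFound."""

    def lookup_vm(instance_uuid):
        # Stand-in for the vCenter SearchIndex.FindAllByUuid lookup seen
        # above; here it always fails, as it did for this instance.
        raise InstanceNotFound(instance_uuid)

    def power_off_and_delete(vm_ref):
        pass  # hypothetical: unregister and delete the backing VM

    def destroy(instance_uuid):
        try:
            vm_ref = lookup_vm(instance_uuid)
        except InstanceNotFound:
            LOG.warning("Instance does not exist on backend: %s", instance_uuid)
        else:
            power_off_and_delete(vm_ref)
        # Reached either way, matching the "Instance destroyed" DEBUG line
        # that follows in the log.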
[ 1264.994648] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-ca90aa73-eb06-4689-8ec0-988713ff447b tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1264.994826] env[61649]: INFO nova.compute.manager [None req-ca90aa73-eb06-4689-8ec0-988713ff447b tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1264.995069] env[61649]: DEBUG oslo.service.loopingcall [None req-ca90aa73-eb06-4689-8ec0-988713ff447b tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1264.995288] env[61649]: DEBUG nova.compute.manager [-] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1264.995385] env[61649]: DEBUG nova.network.neutron [-] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1265.011168] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 52aa20b76fa143a5836dcb57d02d9c37 [ 1265.026532] env[61649]: DEBUG oslo_concurrency.lockutils [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1265.026676] env[61649]: DEBUG oslo_concurrency.lockutils [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1265.028569] env[61649]: INFO nova.compute.claims [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1265.030284] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg 470279d6d7e94e7282d1b36b7e1d12e1 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1265.065007] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 470279d6d7e94e7282d1b36b7e1d12e1 [ 1265.066824] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg 
4686f9bbea824b4a93e1bf453fe1d2b8 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1265.076396] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4686f9bbea824b4a93e1bf453fe1d2b8 [ 1265.079730] env[61649]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61649) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1265.079964] env[61649]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1265.080674] env[61649]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.<locals>._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1265.080674] env[61649]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1265.080674] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1265.080674] env[61649]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1265.080674] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1265.080674] env[61649]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1265.080674] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1265.080674] env[61649]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1265.080674] env[61649]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1265.080674] env[61649]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-6f916948-06c1-4fdc-bd0c-8d5f4f3f3d10'] [ 1265.080674] env[61649]: ERROR oslo.service.loopingcall [ 1265.080674] env[61649]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1265.080674] env[61649]: ERROR oslo.service.loopingcall [ 1265.080674] env[61649]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1265.080674] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1265.080674] env[61649]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1265.081378] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1265.081378] env[61649]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1265.081378] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1265.081378] env[61649]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1265.081378] env[61649]: ERROR oslo.service.loopingcall File 
"/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1265.081378] env[61649]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1265.081378] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1805, in deallocate_for_instance [ 1265.081378] env[61649]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1265.081378] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1265.081378] env[61649]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1265.081378] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1265.081378] env[61649]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1265.081378] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1265.081378] env[61649]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1265.081378] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1265.081378] env[61649]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1265.081378] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1265.081378] env[61649]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1265.082206] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1265.082206] env[61649]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1265.082206] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1265.082206] env[61649]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1265.082206] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1265.082206] env[61649]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1265.082206] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1265.082206] env[61649]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1265.082206] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1265.082206] env[61649]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1265.082206] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1265.082206] env[61649]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1265.082206] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1265.082206] env[61649]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1265.082206] env[61649]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized 
exception. [ 1265.082206] env[61649]: ERROR oslo.service.loopingcall [ 1265.082894] env[61649]: ERROR nova.compute.manager [None req-ca90aa73-eb06-4689-8ec0-988713ff447b tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1265.083015] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-ca90aa73-eb06-4689-8ec0-988713ff447b tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Expecting reply to msg 163ce473e4cf4e11ac9dad52ba97265d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1265.108164] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 163ce473e4cf4e11ac9dad52ba97265d [ 1265.109858] env[61649]: ERROR nova.compute.manager [None req-ca90aa73-eb06-4689-8ec0-988713ff447b tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1265.109858] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Traceback (most recent call last): [ 1265.109858] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1265.109858] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] ret = obj(*args, **kwargs) [ 1265.109858] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1265.109858] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] exception_handler_v20(status_code, error_body) [ 1265.109858] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1265.109858] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] raise client_exc(message=error_message, [ 1265.109858] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1265.109858] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Neutron server returns request_ids: ['req-6f916948-06c1-4fdc-bd0c-8d5f4f3f3d10'] [ 1265.109858] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] [ 1265.110191] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] During handling of the above exception, another exception occurred: [ 1265.110191] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] [ 1265.110191] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Traceback (most recent call last): [ 
1265.110191] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 1265.110191] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] self._delete_instance(context, instance, bdms) [ 1265.110191] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1265.110191] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] self._shutdown_instance(context, instance, bdms) [ 1265.110191] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1265.110191] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] self._try_deallocate_network(context, instance, requested_networks) [ 1265.110191] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1265.110191] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] with excutils.save_and_reraise_exception(): [ 1265.110191] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1265.110191] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] self.force_reraise() [ 1265.110505] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1265.110505] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] raise self.value [ 1265.110505] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1265.110505] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] _deallocate_network_with_retries() [ 1265.110505] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1265.110505] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] return evt.wait() [ 1265.110505] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1265.110505] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] result = hub.switch() [ 1265.110505] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1265.110505] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] return self.greenlet.switch() [ 1265.110505] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 
1265.110505] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] result = func(*self.args, **self.kw) [ 1265.110755] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1265.110755] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] result = f(*args, **kwargs) [ 1265.110755] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1265.110755] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] self._deallocate_network( [ 1265.110755] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1265.110755] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] self.network_api.deallocate_for_instance( [ 1265.110755] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/network/neutron.py", line 1805, in deallocate_for_instance [ 1265.110755] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] data = neutron.list_ports(**search_opts) [ 1265.110755] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1265.110755] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] ret = obj(*args, **kwargs) [ 1265.110755] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1265.110755] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] return self.list('ports', self.ports_path, retrieve_all, [ 1265.110755] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1265.111013] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] ret = obj(*args, **kwargs) [ 1265.111013] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1265.111013] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] for r in self._pagination(collection, path, **params): [ 1265.111013] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1265.111013] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] res = self.get(path, params=params) [ 1265.111013] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1265.111013] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] ret = obj(*args, **kwargs) [ 1265.111013] env[61649]: ERROR nova.compute.manager [instance: 
a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1265.111013] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] return self.retry_request("GET", action, body=body, [ 1265.111013] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1265.111013] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] ret = obj(*args, **kwargs) [ 1265.111013] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1265.111013] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] return self.do_request(method, action, body=body, [ 1265.111279] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1265.111279] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] ret = obj(*args, **kwargs) [ 1265.111279] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1265.111279] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] self._handle_fault_response(status_code, replybody, resp) [ 1265.111279] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1265.111279] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1265.111279] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
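At this point the log has told the operator exactly what to check: "please verify Neutron admin credential located in nova.conf". One way to test those credentials outside Nova is to request a token directly with keystoneauth1; every value below is a placeholder for whatever the deployment's [neutron] section actually configures:

    # Reproduce the 401 outside Nova by requesting a token with the same
    # credentials nova.conf's [neutron] section uses. All values here are
    # placeholders; substitute the deployment's real settings.
    from keystoneauth1 import session
    from keystoneauth1.identity import v3

    auth = v3.Password(
        auth_url='http://controller:5000/v3',   # placeholder
        username='neutron',                     # placeholder
        password='SERVICE_PASSWORD',            # placeholder
        project_name='service',
        user_domain_name='Default',
        project_domain_name='Default',
    )
    sess = session.Session(auth=auth)
    # Raises keystoneauth1.exceptions.Unauthorized (HTTP 401) if the
    # credentials are wrong, mirroring the failure in this log.
    print(sess.get_token())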
[ 1265.111279] env[61649]: ERROR nova.compute.manager [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] [ 1265.111457] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-ca90aa73-eb06-4689-8ec0-988713ff447b tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Expecting reply to msg 02091a961b1d4e8482a14192b38895d3 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1265.137893] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 02091a961b1d4e8482a14192b38895d3 [ 1265.138925] env[61649]: DEBUG oslo_concurrency.lockutils [None req-ca90aa73-eb06-4689-8ec0-988713ff447b tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Lock "a0db1e96-4ca4-4fed-b86b-d8457f3570a9" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.190s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1265.139445] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-ca90aa73-eb06-4689-8ec0-988713ff447b tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Expecting reply to msg 5b105b286aee474b8825e01a71b16632 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1265.142296] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "a0db1e96-4ca4-4fed-b86b-d8457f3570a9" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 137.115s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1265.142486] env[61649]: INFO nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] During sync_power_state the instance has a pending task (deleting). Skip. 
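Note the concurrency guard visible just above: the _sync_power_states periodic task finally acquired the per-instance lock it had waited 137 seconds for, saw task_state 'deleting', and backed off rather than racing the delete path. A simplified sketch of that guard, using oslo.concurrency's real synchronized decorator but an illustrative function body:

    # Simplified sketch of the guard above: power-state sync serializes on
    # the instance UUID and skips if another code path owns the instance.
    from oslo_concurrency import lockutils

    def query_driver_power_state_and_sync(instance):
        # instance is assumed dict-like with 'uuid' and 'task_state' here;
        # the real code works on a Nova instance object.
        @lockutils.synchronized(instance['uuid'])
        def _sync():
            if instance.get('task_state') is not None:
                # e.g. 'deleting': another task owns the instance right now,
                # so reconciling the power state here would race it.
                return
            # ... compare the driver's power state with the DB and reconcile ...
        _sync()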
[ 1265.142658] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "a0db1e96-4ca4-4fed-b86b-d8457f3570a9" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1265.148865] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5b105b286aee474b8825e01a71b16632 [ 1265.149824] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-ca90aa73-eb06-4689-8ec0-988713ff447b tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Expecting reply to msg 6172d1a53f7c4ad6826d0b877f436c05 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1265.164271] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6172d1a53f7c4ad6826d0b877f436c05 [ 1265.166096] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-ca90aa73-eb06-4689-8ec0-988713ff447b tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Expecting reply to msg a66b8c45682b44ed8aae045f3a60017f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1265.194733] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a66b8c45682b44ed8aae045f3a60017f [ 1265.195760] env[61649]: INFO nova.compute.manager [None req-ca90aa73-eb06-4689-8ec0-988713ff447b tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] [instance: a0db1e96-4ca4-4fed-b86b-d8457f3570a9] Successfully reverted task state from None on failure for instance. [ 1265.199463] env[61649]: ERROR oslo_messaging.rpc.server [None req-ca90aa73-eb06-4689-8ec0-988713ff447b tempest-TenantUsagesTestJSON-1836427215 tempest-TenantUsagesTestJSON-1836427215-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1265.199463] env[61649]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1265.199463] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1265.199463] env[61649]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1265.199463] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1265.199463] env[61649]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1265.199463] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1265.199463] env[61649]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1265.199463] env[61649]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1265.199463] env[61649]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-6f916948-06c1-4fdc-bd0c-8d5f4f3f3d10'] [ 1265.199463] env[61649]: ERROR oslo_messaging.rpc.server [ 1265.199463] env[61649]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1265.199463] env[61649]: ERROR oslo_messaging.rpc.server [ 1265.199463] env[61649]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1265.199463] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1265.199463] env[61649]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1265.199906] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1265.199906] env[61649]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1265.199906] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1265.199906] env[61649]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1265.199906] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1265.199906] env[61649]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1265.199906] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1265.199906] env[61649]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1265.199906] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1265.199906] env[61649]: ERROR oslo_messaging.rpc.server raise self.value [ 1265.199906] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1265.199906] env[61649]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1265.199906] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1265.199906] env[61649]: ERROR oslo_messaging.rpc.server with 
excutils.save_and_reraise_exception(): [ 1265.199906] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1265.199906] env[61649]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1265.199906] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1265.199906] env[61649]: ERROR oslo_messaging.rpc.server raise self.value [ 1265.200398] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1265.200398] env[61649]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1265.200398] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1265.200398] env[61649]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1265.200398] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1265.200398] env[61649]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1265.200398] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1265.200398] env[61649]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1265.200398] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1265.200398] env[61649]: ERROR oslo_messaging.rpc.server raise self.value [ 1265.200398] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1265.200398] env[61649]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1265.200398] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3344, in terminate_instance [ 1265.200398] env[61649]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1265.200398] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1265.200398] env[61649]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1265.200398] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3339, in do_terminate_instance [ 1265.200398] env[61649]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1265.200876] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1265.200876] env[61649]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1265.200876] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1265.200876] env[61649]: ERROR oslo_messaging.rpc.server raise self.value [ 1265.200876] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 1265.200876] env[61649]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1265.200876] env[61649]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1265.200876] env[61649]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1265.200876] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1265.200876] env[61649]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1265.200876] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1265.200876] env[61649]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1265.200876] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1265.200876] env[61649]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1265.200876] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1265.200876] env[61649]: ERROR oslo_messaging.rpc.server raise self.value [ 1265.200876] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1265.200876] env[61649]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1265.201270] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1265.201270] env[61649]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1265.201270] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1265.201270] env[61649]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1265.201270] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1265.201270] env[61649]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1265.201270] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1265.201270] env[61649]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1265.201270] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1265.201270] env[61649]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1265.201270] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1265.201270] env[61649]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1265.201270] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1265.201270] env[61649]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1265.201270] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1805, in deallocate_for_instance [ 1265.201270] env[61649]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1265.201270] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1265.201270] env[61649]: ERROR 
oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1265.201756] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1265.201756] env[61649]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1265.201756] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1265.201756] env[61649]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1265.201756] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1265.201756] env[61649]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1265.201756] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1265.201756] env[61649]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1265.201756] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1265.201756] env[61649]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1265.201756] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1265.201756] env[61649]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1265.201756] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1265.201756] env[61649]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1265.201756] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1265.201756] env[61649]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1265.201756] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1265.201756] env[61649]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1265.202255] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1265.202255] env[61649]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1265.202255] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1265.202255] env[61649]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1265.202255] env[61649]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
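The rpc.server traceback above shows the exception riding up through several nested save_and_reraise_exception() context managers: each layer runs its cleanup (reverting the task state, sending notifications, setting vm_state to ERROR) and then lets the original exception keep propagating. save_and_reraise_exception is oslo.utils' real API; the surrounding function here is illustrative:

    # The nesting visible in the traceback above: cleanup layers use
    # oslo.utils' save_and_reraise_exception so the original exception is
    # re-raised automatically after each layer's cleanup runs. The function
    # body is illustrative, not Nova's code.
    from oslo_utils import excutils

    def try_deallocate_network(deallocate, set_instance_to_error):
        try:
            deallocate()
        except Exception:
            with excutils.save_and_reraise_exception():
                # Runs on the way out; the caught exception is re-raised
                # when the context manager exits without error.
                set_instance_to_error()

That is why the instance ends up in vm_state ERROR with its task state reverted, while the RPC server still logs the full NeutronAdminCredentialConfigurationInvalid traceback.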
[ 1265.202255] env[61649]: ERROR oslo_messaging.rpc.server [ 1265.253928] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b3b1858-a977-4cd6-a9c7-e14b3a29ab1c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.261507] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4102b7e6-7693-4125-86dc-2275a157e016 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.290073] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fee03d09-42ed-470a-b757-417d906d5564 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.296798] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-549340e6-2586-4228-a3ea-1b67db7bc059 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.309329] env[61649]: DEBUG nova.compute.provider_tree [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1265.309796] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg a98edf0281794e9aa18c389acbc2c77f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1265.316842] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a98edf0281794e9aa18c389acbc2c77f [ 1265.317718] env[61649]: DEBUG nova.scheduler.client.report [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1265.319891] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg b7376d9b24934108bb80f9f3edcbcc35 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1265.332064] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b7376d9b24934108bb80f9f3edcbcc35 [ 1265.332727] env[61649]: DEBUG oslo_concurrency.lockutils [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.306s {{(pid=61649) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1265.333181] env[61649]: DEBUG nova.compute.manager [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Start building networks asynchronously for instance. {{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1265.334845] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg 77fd070de260438eb4bcc167a87b55c4 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1265.362334] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 77fd070de260438eb4bcc167a87b55c4 [ 1265.363786] env[61649]: DEBUG nova.compute.utils [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Using /dev/sd instead of None {{(pid=61649) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1265.364366] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg d360cef33c614d8493b6a4fee31f25e4 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1265.365276] env[61649]: DEBUG nova.compute.manager [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Allocating IP information in the background. {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1265.365451] env[61649]: DEBUG nova.network.neutron [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] allocate_for_instance() {{(pid=61649) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1265.372080] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d360cef33c614d8493b6a4fee31f25e4 [ 1265.372574] env[61649]: DEBUG nova.compute.manager [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Start building block device mappings for instance. 
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1265.374177] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg 5a6e2303bf694195aad37a2e95ab53ce in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1265.401117] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5a6e2303bf694195aad37a2e95ab53ce [ 1265.403765] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg 2d3ea99378c3485d8ea025d15bec352e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1265.409122] env[61649]: DEBUG nova.policy [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '73f73ecceb844895bbdf126203901353', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ca010f93a4ee40db98cd5885b47d21d2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61649) authorize /opt/stack/nova/nova/policy.py:203}} [ 1265.432502] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2d3ea99378c3485d8ea025d15bec352e [ 1265.433633] env[61649]: DEBUG nova.compute.manager [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Start spawning the instance on the hypervisor. 
{{(pid=61649) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1265.454399] env[61649]: DEBUG nova.virt.hardware [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-04-02T10:03:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-04-02T10:03:42Z,direct_url=,disk_format='vmdk',id=d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d9c1bd4c77004c3cb8e42232cad1896c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-04-02T10:03:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1265.454634] env[61649]: DEBUG nova.virt.hardware [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Flavor limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1265.454790] env[61649]: DEBUG nova.virt.hardware [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Image limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1265.454973] env[61649]: DEBUG nova.virt.hardware [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Flavor pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1265.455383] env[61649]: DEBUG nova.virt.hardware [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Image pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1265.455546] env[61649]: DEBUG nova.virt.hardware [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1265.455754] env[61649]: DEBUG nova.virt.hardware [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1265.455914] env[61649]: DEBUG nova.virt.hardware [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1265.456096] 
env[61649]: DEBUG nova.virt.hardware [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Got 1 possible topologies {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1265.456937] env[61649]: DEBUG nova.virt.hardware [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1265.456937] env[61649]: DEBUG nova.virt.hardware [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1265.457270] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-859d31e7-389c-4f92-b121-c40a42ef4362 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.465050] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bf3c36d-7ad7-4219-ad61-1850d120f6e7 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.763496] env[61649]: DEBUG nova.network.neutron [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Successfully created port: a0f51892-af8b-47bd-90e7-5d40fb12cfa5 {{(pid=61649) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1266.215574] env[61649]: DEBUG nova.compute.manager [req-e9b6d8fe-d3a3-4bd8-8a38-e3ce2e197e93 req-e0b8731c-47ab-43c3-b3c6-5faee7996a4a service nova] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Received event network-vif-plugged-a0f51892-af8b-47bd-90e7-5d40fb12cfa5 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1266.215831] env[61649]: DEBUG oslo_concurrency.lockutils [req-e9b6d8fe-d3a3-4bd8-8a38-e3ce2e197e93 req-e0b8731c-47ab-43c3-b3c6-5faee7996a4a service nova] Acquiring lock "5f67180f-6b27-4487-8858-5f57fcffd041-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1266.216026] env[61649]: DEBUG oslo_concurrency.lockutils [req-e9b6d8fe-d3a3-4bd8-8a38-e3ce2e197e93 req-e0b8731c-47ab-43c3-b3c6-5faee7996a4a service nova] Lock "5f67180f-6b27-4487-8858-5f57fcffd041-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1266.216216] env[61649]: DEBUG oslo_concurrency.lockutils [req-e9b6d8fe-d3a3-4bd8-8a38-e3ce2e197e93 req-e0b8731c-47ab-43c3-b3c6-5faee7996a4a service nova] Lock "5f67180f-6b27-4487-8858-5f57fcffd041-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
1266.216385] env[61649]: DEBUG nova.compute.manager [req-e9b6d8fe-d3a3-4bd8-8a38-e3ce2e197e93 req-e0b8731c-47ab-43c3-b3c6-5faee7996a4a service nova] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] No waiting events found dispatching network-vif-plugged-a0f51892-af8b-47bd-90e7-5d40fb12cfa5 {{(pid=61649) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1266.216549] env[61649]: WARNING nova.compute.manager [req-e9b6d8fe-d3a3-4bd8-8a38-e3ce2e197e93 req-e0b8731c-47ab-43c3-b3c6-5faee7996a4a service nova] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Received unexpected event network-vif-plugged-a0f51892-af8b-47bd-90e7-5d40fb12cfa5 for instance with vm_state building and task_state spawning. [ 1266.294350] env[61649]: DEBUG nova.network.neutron [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Successfully updated port: a0f51892-af8b-47bd-90e7-5d40fb12cfa5 {{(pid=61649) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1266.294350] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg ceab666abc284b9597cc942f7e83a268 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1266.298863] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ceab666abc284b9597cc942f7e83a268 [ 1266.299521] env[61649]: DEBUG oslo_concurrency.lockutils [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Acquiring lock "refresh_cache-5f67180f-6b27-4487-8858-5f57fcffd041" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1266.299655] env[61649]: DEBUG oslo_concurrency.lockutils [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Acquired lock "refresh_cache-5f67180f-6b27-4487-8858-5f57fcffd041" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1266.299797] env[61649]: DEBUG nova.network.neutron [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Building network info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1266.300379] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg 861edc6c30474f83820340ce4be6772f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1266.307176] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 861edc6c30474f83820340ce4be6772f [ 1266.348060] env[61649]: DEBUG nova.network.neutron [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Instance cache missing network info. 
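The "<uuid>-events" lock acquire/release lines above show how InstanceEvents.pop_instance_event serializes access to an instance's pending-event dict. A condensed sketch of the same pattern with oslo.concurrency; the module-level dict is illustrative, nova's real InstanceEvents object carries more state:

    from oslo_concurrency import lockutils

    _pending_events = {}  # instance_uuid -> {event_name: payload}

    def pop_instance_event(instance_uuid, event_name):
        # Serialize on the same "<uuid>-events" lock name seen in the log
        # so event producers and consumers never race on the dict.
        @lockutils.synchronized(instance_uuid + "-events")
        def _pop_event():
            return _pending_events.get(instance_uuid, {}).pop(event_name, None)
        return _pop_event()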
{{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1266.493581] env[61649]: DEBUG nova.network.neutron [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Updating instance_info_cache with network_info: [{"id": "a0f51892-af8b-47bd-90e7-5d40fb12cfa5", "address": "fa:16:3e:73:3d:f5", "network": {"id": "90f0d83b-388c-413d-b2b0-909a46293040", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1619441242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca010f93a4ee40db98cd5885b47d21d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "604056d6-6dd6-47fa-9eaa-6863a3a7c488", "external-id": "nsx-vlan-transportzone-287", "segmentation_id": 287, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa0f51892-af", "ovs_interfaceid": "a0f51892-af8b-47bd-90e7-5d40fb12cfa5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1266.494095] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg a90c156927ba4b5b82a2991bc1f69335 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1266.504523] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a90c156927ba4b5b82a2991bc1f69335 [ 1266.505155] env[61649]: DEBUG oslo_concurrency.lockutils [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Releasing lock "refresh_cache-5f67180f-6b27-4487-8858-5f57fcffd041" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1266.505391] env[61649]: DEBUG nova.compute.manager [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Instance network_info: |[{"id": "a0f51892-af8b-47bd-90e7-5d40fb12cfa5", "address": "fa:16:3e:73:3d:f5", "network": {"id": "90f0d83b-388c-413d-b2b0-909a46293040", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1619441242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca010f93a4ee40db98cd5885b47d21d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "604056d6-6dd6-47fa-9eaa-6863a3a7c488", "external-id": "nsx-vlan-transportzone-287", "segmentation_id": 287, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa0f51892-af", "ovs_interfaceid": "a0f51892-af8b-47bd-90e7-5d40fb12cfa5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1266.505784] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:73:3d:f5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '604056d6-6dd6-47fa-9eaa-6863a3a7c488', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a0f51892-af8b-47bd-90e7-5d40fb12cfa5', 'vif_model': 'vmxnet3'}] {{(pid=61649) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1266.513565] env[61649]: DEBUG oslo.service.loopingcall [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1266.514156] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Creating VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1266.514290] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3121ba6b-3da4-491f-b2c7-81a62888fb44 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.535148] env[61649]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1266.535148] env[61649]: value = "task-158218" [ 1266.535148] env[61649]: _type = "Task" [ 1266.535148] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1266.542969] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158218, 'name': CreateVM_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.046642] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158218, 'name': CreateVM_Task, 'duration_secs': 0.27776} completed successfully. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1267.047111] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Created VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1267.047550] env[61649]: DEBUG oslo_concurrency.lockutils [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1267.047725] env[61649]: DEBUG oslo_concurrency.lockutils [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1267.048078] env[61649]: DEBUG oslo_concurrency.lockutils [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1267.048935] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc9e32a3-6f07-45c0-b720-8dacd7511882 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.053202] env[61649]: DEBUG oslo_vmware.api [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Waiting for the task: (returnval){ [ 1267.053202] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52282766-0c73-a37d-d860-562c0acc6bac" [ 1267.053202] env[61649]: _type = "Task" [ 1267.053202] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1267.061598] env[61649]: DEBUG oslo_vmware.api [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52282766-0c73-a37d-d860-562c0acc6bac, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.562693] env[61649]: DEBUG oslo_concurrency.lockutils [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1267.563380] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Processing image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1267.563711] env[61649]: DEBUG oslo_concurrency.lockutils [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1268.246339] env[61649]: DEBUG nova.compute.manager [req-c10f8347-2420-4fbf-b355-33d54a39b557 req-eed0651c-9b9a-47e5-bed2-1446a919809f service nova] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Received event network-changed-a0f51892-af8b-47bd-90e7-5d40fb12cfa5 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1268.246339] env[61649]: DEBUG nova.compute.manager [req-c10f8347-2420-4fbf-b355-33d54a39b557 req-eed0651c-9b9a-47e5-bed2-1446a919809f service nova] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Refreshing instance network info cache due to event network-changed-a0f51892-af8b-47bd-90e7-5d40fb12cfa5. 
{{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 1268.246339] env[61649]: DEBUG oslo_concurrency.lockutils [req-c10f8347-2420-4fbf-b355-33d54a39b557 req-eed0651c-9b9a-47e5-bed2-1446a919809f service nova] Acquiring lock "refresh_cache-5f67180f-6b27-4487-8858-5f57fcffd041" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1268.246339] env[61649]: DEBUG oslo_concurrency.lockutils [req-c10f8347-2420-4fbf-b355-33d54a39b557 req-eed0651c-9b9a-47e5-bed2-1446a919809f service nova] Acquired lock "refresh_cache-5f67180f-6b27-4487-8858-5f57fcffd041" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1268.246480] env[61649]: DEBUG nova.network.neutron [req-c10f8347-2420-4fbf-b355-33d54a39b557 req-eed0651c-9b9a-47e5-bed2-1446a919809f service nova] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Refreshing network info cache for port a0f51892-af8b-47bd-90e7-5d40fb12cfa5 {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 1268.246951] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-c10f8347-2420-4fbf-b355-33d54a39b557 req-eed0651c-9b9a-47e5-bed2-1446a919809f service nova] Expecting reply to msg a5e8fa6b0683452cb5acb8825f311869 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1268.254414] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a5e8fa6b0683452cb5acb8825f311869 [ 1268.518784] env[61649]: DEBUG nova.network.neutron [req-c10f8347-2420-4fbf-b355-33d54a39b557 req-eed0651c-9b9a-47e5-bed2-1446a919809f service nova] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Updated VIF entry in instance network info cache for port a0f51892-af8b-47bd-90e7-5d40fb12cfa5. 
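The instance_info_cache records around this point store network_info as a list of VIF dicts with nested network/subnets/ips structures. A small sketch of extracting the fixed IPs from an entry shaped like the ones in this log (trimmed to the relevant keys):

    # One VIF entry, trimmed from the cache records in this log.
    network_info = [{
        "id": "a0f51892-af8b-47bd-90e7-5d40fb12cfa5",
        "address": "fa:16:3e:73:3d:f5",
        "network": {"subnets": [{"cidr": "192.168.128.0/28",
                                 "ips": [{"address": "192.168.128.13",
                                          "type": "fixed"}]}]},
    }]

    fixed_ips = [ip["address"]
                 for vif in network_info
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"]
                 if ip["type"] == "fixed"]
    print(fixed_ips)  # ['192.168.128.13']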
{{(pid=61649) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 1268.519185] env[61649]: DEBUG nova.network.neutron [req-c10f8347-2420-4fbf-b355-33d54a39b557 req-eed0651c-9b9a-47e5-bed2-1446a919809f service nova] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Updating instance_info_cache with network_info: [{"id": "a0f51892-af8b-47bd-90e7-5d40fb12cfa5", "address": "fa:16:3e:73:3d:f5", "network": {"id": "90f0d83b-388c-413d-b2b0-909a46293040", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1619441242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca010f93a4ee40db98cd5885b47d21d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "604056d6-6dd6-47fa-9eaa-6863a3a7c488", "external-id": "nsx-vlan-transportzone-287", "segmentation_id": 287, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa0f51892-af", "ovs_interfaceid": "a0f51892-af8b-47bd-90e7-5d40fb12cfa5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1268.519727] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-c10f8347-2420-4fbf-b355-33d54a39b557 req-eed0651c-9b9a-47e5-bed2-1446a919809f service nova] Expecting reply to msg e3e55e9e5ec948af8d6203411ffd7203 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1268.528290] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e3e55e9e5ec948af8d6203411ffd7203 [ 1268.528853] env[61649]: DEBUG oslo_concurrency.lockutils [req-c10f8347-2420-4fbf-b355-33d54a39b557 req-eed0651c-9b9a-47e5-bed2-1446a919809f service nova] Releasing lock "refresh_cache-5f67180f-6b27-4487-8858-5f57fcffd041" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1270.341160] env[61649]: DEBUG oslo_concurrency.lockutils [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Acquiring lock "ff225293-ad72-499a-9b5b-147d0bc40350" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1270.341549] env[61649]: DEBUG oslo_concurrency.lockutils [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Lock "ff225293-ad72-499a-9b5b-147d0bc40350" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1270.713275] env[61649]: DEBUG oslo_concurrency.lockutils [None req-a2e37de6-c0c6-4a75-b735-63fb2f75786f tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Acquiring lock 
"36d37cda-b987-4c5a-8af1-6eede009e61e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1270.713534] env[61649]: DEBUG oslo_concurrency.lockutils [None req-a2e37de6-c0c6-4a75-b735-63fb2f75786f tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Lock "36d37cda-b987-4c5a-8af1-6eede009e61e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1286.777349] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8de9762a-e3d2-4851-9bb1-85eeab5a865a tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Expecting reply to msg a8971119cd5f4714b9f030e24a4b4dc0 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1286.787557] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a8971119cd5f4714b9f030e24a4b4dc0 [ 1286.788085] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8de9762a-e3d2-4851-9bb1-85eeab5a865a tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Acquiring lock "0fb0aaae-b6d2-418d-81a9-74671f4b97c6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1308.740788] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-108d1778-0860-4d10-88a1-78c97b44c494 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg 190d33ace16a4655ba3c028cb3db9356 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1308.749531] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 190d33ace16a4655ba3c028cb3db9356 [ 1308.750027] env[61649]: DEBUG oslo_concurrency.lockutils [None req-108d1778-0860-4d10-88a1-78c97b44c494 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Acquiring lock "5f67180f-6b27-4487-8858-5f57fcffd041" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1309.389523] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1310.718112] env[61649]: WARNING oslo_vmware.rw_handles [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1310.718112] env[61649]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1310.718112] env[61649]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1310.718112] env[61649]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1310.718112] env[61649]: ERROR oslo_vmware.rw_handles 
File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1310.718112] env[61649]: ERROR oslo_vmware.rw_handles response.begin() [ 1310.718112] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1310.718112] env[61649]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1310.718112] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1310.718112] env[61649]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1310.718112] env[61649]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1310.718112] env[61649]: ERROR oslo_vmware.rw_handles [ 1310.718755] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Downloaded image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to vmware_temp/2010d08e-a4d6-416c-ad08-427d5f61b8b3/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1310.720529] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Caching image {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1310.720787] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Copying Virtual Disk [datastore1] vmware_temp/2010d08e-a4d6-416c-ad08-427d5f61b8b3/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk to [datastore1] vmware_temp/2010d08e-a4d6-416c-ad08-427d5f61b8b3/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk {{(pid=61649) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1310.721087] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0eeaf797-2957-4bb4-8079-49bdb6197e21 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.728813] env[61649]: DEBUG oslo_vmware.api [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Waiting for the task: (returnval){ [ 1310.728813] env[61649]: value = "task-158219" [ 1310.728813] env[61649]: _type = "Task" [ 1310.728813] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1310.736203] env[61649]: DEBUG oslo_vmware.api [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Task: {'id': task-158219, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.239366] env[61649]: DEBUG oslo_vmware.exceptions [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Fault InvalidArgument not matched. {{(pid=61649) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1311.239726] env[61649]: DEBUG oslo_concurrency.lockutils [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1311.240325] env[61649]: ERROR nova.compute.manager [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1311.240325] env[61649]: Faults: ['InvalidArgument'] [ 1311.240325] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Traceback (most recent call last): [ 1311.240325] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1311.240325] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] yield resources [ 1311.240325] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1311.240325] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] self.driver.spawn(context, instance, image_meta, [ 1311.240325] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1311.240325] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1311.240325] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1311.240325] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] self._fetch_image_if_missing(context, vi) [ 1311.240325] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1311.240845] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] image_cache(vi, tmp_image_ds_loc) [ 1311.240845] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1311.240845] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] vm_util.copy_virtual_disk( [ 1311.240845] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] File 
"/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1311.240845] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] session._wait_for_task(vmdk_copy_task) [ 1311.240845] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1311.240845] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] return self.wait_for_task(task_ref) [ 1311.240845] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1311.240845] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] return evt.wait() [ 1311.240845] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1311.240845] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] result = hub.switch() [ 1311.240845] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1311.240845] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] return self.greenlet.switch() [ 1311.241398] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1311.241398] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] self.f(*self.args, **self.kw) [ 1311.241398] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1311.241398] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] raise exceptions.translate_fault(task_info.error) [ 1311.241398] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1311.241398] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Faults: ['InvalidArgument'] [ 1311.241398] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] [ 1311.241398] env[61649]: INFO nova.compute.manager [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Terminating instance [ 1311.243253] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1311.243253] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 
tempest-InstanceActionsV221TestJSON-118626687-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1311.243253] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ab2adca0-6959-4796-9635-c8101953e088 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.245461] env[61649]: DEBUG nova.compute.manager [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1311.245696] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1311.246423] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b4ca995-c5f7-4d23-bd3f-26bcc2c8ec91 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.253071] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Unregistering the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1311.253318] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-98dedd40-522a-4e4e-bff5-6949a51acf2b {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.255383] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1311.255602] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61649) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1311.256573] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d2ec083-8f36-469a-b2c3-1df1b123eff4 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.262489] env[61649]: DEBUG oslo_vmware.api [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Waiting for the task: (returnval){ [ 1311.262489] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52e2ff7b-5fd9-fc63-7df6-0a75b09f1f8b" [ 1311.262489] env[61649]: _type = "Task" [ 1311.262489] env[61649]: } to complete. 
{{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1311.271460] env[61649]: DEBUG oslo_vmware.api [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52e2ff7b-5fd9-fc63-7df6-0a75b09f1f8b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.323407] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Unregistered the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1311.323407] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Deleting contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1311.323407] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Deleting the datastore file [datastore1] 29f84900-0805-4ab2-af4d-bd7be2ac94d3 {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1311.323407] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-90c0a4f8-afdf-49b8-9608-b8aceeded417 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.329173] env[61649]: DEBUG oslo_vmware.api [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Waiting for the task: (returnval){ [ 1311.329173] env[61649]: value = "task-158221" [ 1311.329173] env[61649]: _type = "Task" [ 1311.329173] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1311.336621] env[61649]: DEBUG oslo_vmware.api [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Task: {'id': task-158221, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.772379] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Preparing fetch location {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1311.772713] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Creating directory with path [datastore1] vmware_temp/35b1c3ca-a20b-4219-9404-8d4161d8054e/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1311.772993] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d90a414e-93b3-4aa1-bcd3-ae2e2a085af6 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.784781] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Created directory with path [datastore1] vmware_temp/35b1c3ca-a20b-4219-9404-8d4161d8054e/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1311.785045] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Fetch image to [datastore1] vmware_temp/35b1c3ca-a20b-4219-9404-8d4161d8054e/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1311.785285] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to [datastore1] vmware_temp/35b1c3ca-a20b-4219-9404-8d4161d8054e/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1311.786047] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59529f3e-a0d8-4ad0-a1c0-11b4992c6fde {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.792529] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0ff7f63-0242-41cc-99aa-ca25c0155cf2 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.801042] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac2f3a01-7c3a-4dde-920f-96d390d2c29b {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.833204] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-44fcae9a-549b-4f30-a083-15bda9fbe25d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.840239] env[61649]: DEBUG oslo_vmware.api [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Task: {'id': task-158221, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074189} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1311.841797] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Deleted the datastore file {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1311.842057] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Deleted contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1311.842296] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1311.842536] env[61649]: INFO nova.compute.manager [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Took 0.60 seconds to destroy the instance on the hypervisor. 
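The destroy sequence traced above runs UnregisterVM, then a DeleteDatastoreFile_Task for the instance directory, then reports the total elapsed time. A condensed sketch of that ordering; invoke() and wait_for_task() stand in for the oslo.vmware session calls and are purely illustrative:

    import time

    def destroy_instance(invoke, wait_for_task, vm_ref, ds_path):
        start = time.monotonic()
        invoke("UnregisterVM", vm_ref)                      # unregister the VM
        task = invoke("DeleteDatastoreFile_Task", ds_path)  # drop its files
        wait_for_task(task)
        print(f"Took {time.monotonic() - start:.2f} seconds "
              "to destroy the instance on the hypervisor.")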
[ 1311.844414] env[61649]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-30241e03-7280-45d2-99ef-d20c6b4dba22 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.846324] env[61649]: DEBUG nova.compute.claims [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Aborting claim: {{(pid=61649) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1311.846558] env[61649]: DEBUG oslo_concurrency.lockutils [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1311.846831] env[61649]: DEBUG oslo_concurrency.lockutils [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1311.848827] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg be1b363f9a0443b29b2150bfe15a7f0e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1311.867669] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1311.890510] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg be1b363f9a0443b29b2150bfe15a7f0e [ 1311.913766] env[61649]: DEBUG oslo_vmware.rw_handles [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/35b1c3ca-a20b-4219-9404-8d4161d8054e/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
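The record above creates an HTTP write connection with a known Content-Length to stream image data to a datastore folder URL. A hypothetical stripped-down writer using only http.client; the real rw_handles writer also deals with cookies, TLS context, and the service ticket acquired just above:

    import http.client
    import urllib.parse

    def upload_vmdk(url, data_iter, size):
        # Stream image data to the datastore URL with a plain PUT.
        parts = urllib.parse.urlsplit(url)
        conn = http.client.HTTPSConnection(parts.netloc)
        conn.putrequest("PUT", parts.path + "?" + parts.query)
        conn.putheader("Content-Length", str(size))
        conn.endheaders()
        for chunk in data_iter:
            conn.send(chunk)
        return conn  # caller reads the response and closes, as sketched earlier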
{{(pid=61649) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1311.969710] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1311.973086] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1311.973245] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61649) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 1311.975495] env[61649]: DEBUG oslo_vmware.rw_handles [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Completed reading data from the image iterator. {{(pid=61649) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1311.975779] env[61649]: DEBUG oslo_vmware.rw_handles [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/35b1c3ca-a20b-4219-9404-8d4161d8054e/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1312.137811] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b775c9d-3671-468f-89d3-6fec64fb95aa {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.145847] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d5a7362-7863-4e80-a454-c17ad40b4060 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.175787] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74aa6369-33f3-4189-9e30-d163e4eb7316 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.182542] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1107fec-0841-484d-82c6-532ada82dd56 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.195088] env[61649]: DEBUG nova.compute.provider_tree [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1312.195569] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 
tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg 08fb85fdc11f407e9b8962135f098095 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1312.203392] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 08fb85fdc11f407e9b8962135f098095 [ 1312.204297] env[61649]: DEBUG nova.scheduler.client.report [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1312.206527] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg 82996c228b53482683ae60aa6b0d14f0 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1312.216809] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 82996c228b53482683ae60aa6b0d14f0 [ 1312.217494] env[61649]: DEBUG oslo_concurrency.lockutils [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.371s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1312.218008] env[61649]: ERROR nova.compute.manager [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1312.218008] env[61649]: Faults: ['InvalidArgument'] [ 1312.218008] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Traceback (most recent call last): [ 1312.218008] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1312.218008] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] self.driver.spawn(context, instance, image_meta, [ 1312.218008] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1312.218008] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1312.218008] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1312.218008] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] self._fetch_image_if_missing(context, vi) [ 1312.218008] 
env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1312.218008] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] image_cache(vi, tmp_image_ds_loc) [ 1312.218008] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1312.218337] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] vm_util.copy_virtual_disk( [ 1312.218337] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1312.218337] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] session._wait_for_task(vmdk_copy_task) [ 1312.218337] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1312.218337] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] return self.wait_for_task(task_ref) [ 1312.218337] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1312.218337] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] return evt.wait() [ 1312.218337] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1312.218337] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] result = hub.switch() [ 1312.218337] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1312.218337] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] return self.greenlet.switch() [ 1312.218337] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1312.218337] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] self.f(*self.args, **self.kw) [ 1312.218685] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1312.218685] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] raise exceptions.translate_fault(task_info.error) [ 1312.218685] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1312.218685] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Faults: ['InvalidArgument'] [ 1312.218685] env[61649]: ERROR nova.compute.manager [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] [ 1312.218685] env[61649]: DEBUG nova.compute.utils [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 
tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] VimFaultException {{(pid=61649) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1312.220058] env[61649]: DEBUG nova.compute.manager [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Build of instance 29f84900-0805-4ab2-af4d-bd7be2ac94d3 was re-scheduled: A specified parameter was not correct: fileType [ 1312.220058] env[61649]: Faults: ['InvalidArgument'] {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1312.220459] env[61649]: DEBUG nova.compute.manager [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Unplugging VIFs for instance {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1312.220647] env[61649]: DEBUG nova.compute.manager [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1312.220819] env[61649]: DEBUG nova.compute.manager [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1312.220983] env[61649]: DEBUG nova.network.neutron [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1312.454609] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg 06d763f3b59a45f48e60c7de21266320 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1312.466536] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 06d763f3b59a45f48e60c7de21266320 [ 1312.467108] env[61649]: DEBUG nova.network.neutron [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1312.467575] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg 5c566261a91046c4b2b485f8715a9c48 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1312.479205] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5c566261a91046c4b2b485f8715a9c48 [ 
1312.479866] env[61649]: INFO nova.compute.manager [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Took 0.26 seconds to deallocate network for instance. [ 1312.481658] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg 4ed41b514e534939af9a72fbf621f79e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1312.515600] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4ed41b514e534939af9a72fbf621f79e [ 1312.518579] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg 35ca7854fc8a45f698bd00b72bf01173 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1312.548776] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 35ca7854fc8a45f698bd00b72bf01173 [ 1312.568940] env[61649]: INFO nova.scheduler.client.report [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Deleted allocations for instance 29f84900-0805-4ab2-af4d-bd7be2ac94d3 [ 1312.579720] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg fc35edbe66154647b96e2608544ead72 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1312.588476] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fc35edbe66154647b96e2608544ead72 [ 1312.588996] env[61649]: DEBUG oslo_concurrency.lockutils [None req-e42c9bb6-fa4b-4501-91e4-e6f4574d774f tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Lock "29f84900-0805-4ab2-af4d-bd7be2ac94d3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 592.984s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1312.589589] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg 1d04f8c6b7d045b79473a1534412dda5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1312.590297] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7e8f793e-8739-4c94-9204-95f0e6d6129c tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Lock "29f84900-0805-4ab2-af4d-bd7be2ac94d3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 396.725s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1312.590534] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7e8f793e-8739-4c94-9204-95f0e6d6129c tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Acquiring lock "29f84900-0805-4ab2-af4d-bd7be2ac94d3-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1312.590748] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7e8f793e-8739-4c94-9204-95f0e6d6129c tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Lock "29f84900-0805-4ab2-af4d-bd7be2ac94d3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1312.590915] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7e8f793e-8739-4c94-9204-95f0e6d6129c tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Lock "29f84900-0805-4ab2-af4d-bd7be2ac94d3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1312.592915] env[61649]: INFO nova.compute.manager [None req-7e8f793e-8739-4c94-9204-95f0e6d6129c tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Terminating instance [ 1312.594520] env[61649]: DEBUG nova.compute.manager [None req-7e8f793e-8739-4c94-9204-95f0e6d6129c tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1312.594711] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-7e8f793e-8739-4c94-9204-95f0e6d6129c tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1312.595181] env[61649]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-816ad44e-a19a-435d-86a1-06bd97f474b4 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.604215] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df58e213-2d63-4b63-89b9-080285e74d01 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.614525] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1d04f8c6b7d045b79473a1534412dda5 [ 1312.615180] env[61649]: DEBUG nova.compute.manager [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Starting instance... 
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1312.616771] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg da37edd5e297407b81a1ecfa50abab56 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1312.636176] env[61649]: WARNING nova.virt.vmwareapi.vmops [None req-7e8f793e-8739-4c94-9204-95f0e6d6129c tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 29f84900-0805-4ab2-af4d-bd7be2ac94d3 could not be found. [ 1312.636378] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-7e8f793e-8739-4c94-9204-95f0e6d6129c tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1312.636551] env[61649]: INFO nova.compute.manager [None req-7e8f793e-8739-4c94-9204-95f0e6d6129c tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1312.636793] env[61649]: DEBUG oslo.service.loopingcall [None req-7e8f793e-8739-4c94-9204-95f0e6d6129c tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1312.638595] env[61649]: DEBUG nova.compute.manager [-] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1312.638595] env[61649]: DEBUG nova.network.neutron [-] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1312.648825] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg da37edd5e297407b81a1ecfa50abab56 [ 1312.655405] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg fecf9985fc6643f4afd01b78ea33f1ce in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1312.665358] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1312.665586] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1312.667025] env[61649]: INFO nova.compute.claims [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1312.668634] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg d48dbc9e6b83434a857121087ff1918b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1312.670610] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fecf9985fc6643f4afd01b78ea33f1ce [ 1312.670919] env[61649]: DEBUG nova.network.neutron [-] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1312.671221] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg a976f717e02545cea694dc6040fb9d1f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1312.677086] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a976f717e02545cea694dc6040fb9d1f [ 1312.677518] env[61649]: INFO nova.compute.manager [-] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] Took 0.04 seconds to deallocate network for instance. 
[ 1312.681317] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7e8f793e-8739-4c94-9204-95f0e6d6129c tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg 7edd300cb50947ac84416263e79de58c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1312.710230] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d48dbc9e6b83434a857121087ff1918b [ 1312.711935] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg c9a6c20dae074e9d8bd3be392077aed0 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1312.718556] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c9a6c20dae074e9d8bd3be392077aed0 [ 1312.722297] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7edd300cb50947ac84416263e79de58c [ 1312.735469] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7e8f793e-8739-4c94-9204-95f0e6d6129c tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg 6c1105be5a664302ba40a2574f2c6d61 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1312.769913] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6c1105be5a664302ba40a2574f2c6d61 [ 1312.772457] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7e8f793e-8739-4c94-9204-95f0e6d6129c tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Lock "29f84900-0805-4ab2-af4d-bd7be2ac94d3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.182s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1312.772871] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7e8f793e-8739-4c94-9204-95f0e6d6129c tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg 0cac4d1c0cac456db62e2897e575b195 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1312.773884] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "29f84900-0805-4ab2-af4d-bd7be2ac94d3" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 184.746s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1312.774232] env[61649]: INFO nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 29f84900-0805-4ab2-af4d-bd7be2ac94d3] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1312.774482] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "29f84900-0805-4ab2-af4d-bd7be2ac94d3" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1312.781483] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0cac4d1c0cac456db62e2897e575b195 [ 1312.881471] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65c53b80-b516-408f-959a-712f075bbbe4 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.889209] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60d6dcba-7171-4104-ac85-4aa9f1c2e3f4 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.918209] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2a284aa-c1e9-4feb-bea1-e0cb01aa1198 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.924832] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3538399-7967-404e-b3fb-fc669fe4d358 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.928608] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1312.939057] env[61649]: DEBUG nova.compute.provider_tree [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1312.939704] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg 9699d3a033784461a9bb53da0b6a6c5e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1312.947839] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9699d3a033784461a9bb53da0b6a6c5e [ 1312.948817] env[61649]: DEBUG nova.scheduler.client.report [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1312.951211] env[61649]: INFO 
oslo_messaging._drivers.amqpdriver [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg 0cb7e66defc946f398ec3c1408bd4330 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1312.963222] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0cb7e66defc946f398ec3c1408bd4330 [ 1312.963991] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.298s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1312.964586] env[61649]: DEBUG nova.compute.manager [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Start building networks asynchronously for instance. {{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1312.966353] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg acc42c288a6843bc8ed358e133a5e2e3 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1312.995531] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg acc42c288a6843bc8ed358e133a5e2e3 [ 1312.997232] env[61649]: DEBUG nova.compute.utils [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Using /dev/sd instead of None {{(pid=61649) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1312.998010] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg 1f2598bf8ff147d8a9c2233c8cad33b0 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1312.999168] env[61649]: DEBUG nova.compute.manager [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Allocating IP information in the background. {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1312.999613] env[61649]: DEBUG nova.network.neutron [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] allocate_for_instance() {{(pid=61649) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1313.006345] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1f2598bf8ff147d8a9c2233c8cad33b0 [ 1313.006941] env[61649]: DEBUG nova.compute.manager [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Start building block device mappings for instance. 
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1313.008797] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg 3dfb02009a34496f8eced6513502dcff in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1313.035718] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3dfb02009a34496f8eced6513502dcff [ 1313.038608] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg 7995c46023f04bf29c4be857a790ad5f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1313.042230] env[61649]: DEBUG nova.policy [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e0551dadfa2643d18f591f7c00dab53e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2e0bd6f2d26e442f92498e358016a346', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61649) authorize /opt/stack/nova/nova/policy.py:203}} [ 1313.066342] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7995c46023f04bf29c4be857a790ad5f [ 1313.067543] env[61649]: DEBUG nova.compute.manager [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Start spawning the instance on the hypervisor. 
{{(pid=61649) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1313.092961] env[61649]: DEBUG nova.virt.hardware [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-04-02T10:03:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-04-02T10:03:42Z,direct_url=,disk_format='vmdk',id=d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d9c1bd4c77004c3cb8e42232cad1896c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-04-02T10:03:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1313.093196] env[61649]: DEBUG nova.virt.hardware [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Flavor limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1313.093354] env[61649]: DEBUG nova.virt.hardware [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Image limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1313.093535] env[61649]: DEBUG nova.virt.hardware [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Flavor pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1313.093694] env[61649]: DEBUG nova.virt.hardware [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Image pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1313.093829] env[61649]: DEBUG nova.virt.hardware [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1313.094029] env[61649]: DEBUG nova.virt.hardware [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1313.094185] env[61649]: DEBUG nova.virt.hardware [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1313.094352] env[61649]: DEBUG 
nova.virt.hardware [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Got 1 possible topologies {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1313.094515] env[61649]: DEBUG nova.virt.hardware [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1313.094686] env[61649]: DEBUG nova.virt.hardware [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1313.096028] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88707b6e-cbba-4ef8-a9b3-7ca717a94134 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.103138] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7bacb84-e791-4741-ad8b-a4abd613bd72 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.393099] env[61649]: DEBUG nova.network.neutron [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Successfully created port: f229c29a-a756-4bb0-bf5b-b3c27dbf082e {{(pid=61649) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1313.921583] env[61649]: DEBUG nova.network.neutron [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Successfully updated port: f229c29a-a756-4bb0-bf5b-b3c27dbf082e {{(pid=61649) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1313.922077] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg 76eb4f4bd98b47679984adacd32d0bf1 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1313.928551] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1313.931525] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 76eb4f4bd98b47679984adacd32d0bf1 [ 1313.932188] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Acquiring lock "refresh_cache-aa39503a-2342-421e-928f-35ec7c8e47fb" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1313.932316] env[61649]: DEBUG oslo_concurrency.lockutils [None 
req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Acquired lock "refresh_cache-aa39503a-2342-421e-928f-35ec7c8e47fb" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1313.932490] env[61649]: DEBUG nova.network.neutron [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Building network info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1313.932833] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg cb0c64f03f8147ec96ae9b57766e139f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1313.941335] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cb0c64f03f8147ec96ae9b57766e139f [ 1313.972535] env[61649]: DEBUG nova.network.neutron [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Instance cache missing network info. {{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1314.114689] env[61649]: DEBUG nova.network.neutron [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Updating instance_info_cache with network_info: [{"id": "f229c29a-a756-4bb0-bf5b-b3c27dbf082e", "address": "fa:16:3e:ba:85:49", "network": {"id": "a42f700e-7bbf-46d9-a33a-c4d37e928c52", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1858676203-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2e0bd6f2d26e442f92498e358016a346", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf229c29a-a7", "ovs_interfaceid": "f229c29a-a756-4bb0-bf5b-b3c27dbf082e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1314.115214] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg b5fa61dea5d34100bb361eeee6d84f31 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1314.127669] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b5fa61dea5d34100bb361eeee6d84f31 [ 
1314.128231] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Releasing lock "refresh_cache-aa39503a-2342-421e-928f-35ec7c8e47fb" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1314.128488] env[61649]: DEBUG nova.compute.manager [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Instance network_info: |[{"id": "f229c29a-a756-4bb0-bf5b-b3c27dbf082e", "address": "fa:16:3e:ba:85:49", "network": {"id": "a42f700e-7bbf-46d9-a33a-c4d37e928c52", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1858676203-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2e0bd6f2d26e442f92498e358016a346", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf229c29a-a7", "ovs_interfaceid": "f229c29a-a756-4bb0-bf5b-b3c27dbf082e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1314.128862] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ba:85:49', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ba866c99-1cb2-4588-9f76-4bc0421ed46a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f229c29a-a756-4bb0-bf5b-b3c27dbf082e', 'vif_model': 'vmxnet3'}] {{(pid=61649) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1314.136267] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Creating folder: Project (2e0bd6f2d26e442f92498e358016a346). Parent ref: group-v51588. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1314.136757] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6bec4bd8-2d1c-43b5-8040-48e22f508d50 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.147225] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Created folder: Project (2e0bd6f2d26e442f92498e358016a346) in parent group-v51588. 
[ 1314.147424] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Creating folder: Instances. Parent ref: group-v51668. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1314.147648] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9e07c2b6-b63e-48c0-bc08-54d2d7d9028a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.156220] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Created folder: Instances in parent group-v51668. [ 1314.156444] env[61649]: DEBUG oslo.service.loopingcall [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1314.156619] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Creating VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1314.156802] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a8834236-91a4-4533-8984-c550272182c2 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.174784] env[61649]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1314.174784] env[61649]: value = "task-158224" [ 1314.174784] env[61649]: _type = "Task" [ 1314.174784] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.181781] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158224, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.573630] env[61649]: DEBUG nova.compute.manager [req-58ab9a01-e365-4b75-b948-d022b4746ebe req-b3ad18d3-b903-4fa0-8404-9e4acdcd174b service nova] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Received event network-vif-plugged-f229c29a-a756-4bb0-bf5b-b3c27dbf082e {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1314.573844] env[61649]: DEBUG oslo_concurrency.lockutils [req-58ab9a01-e365-4b75-b948-d022b4746ebe req-b3ad18d3-b903-4fa0-8404-9e4acdcd174b service nova] Acquiring lock "aa39503a-2342-421e-928f-35ec7c8e47fb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1314.574048] env[61649]: DEBUG oslo_concurrency.lockutils [req-58ab9a01-e365-4b75-b948-d022b4746ebe req-b3ad18d3-b903-4fa0-8404-9e4acdcd174b service nova] Lock "aa39503a-2342-421e-928f-35ec7c8e47fb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1314.574221] env[61649]: DEBUG oslo_concurrency.lockutils [req-58ab9a01-e365-4b75-b948-d022b4746ebe req-b3ad18d3-b903-4fa0-8404-9e4acdcd174b service nova] Lock "aa39503a-2342-421e-928f-35ec7c8e47fb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1314.574388] env[61649]: DEBUG nova.compute.manager [req-58ab9a01-e365-4b75-b948-d022b4746ebe req-b3ad18d3-b903-4fa0-8404-9e4acdcd174b service nova] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] No waiting events found dispatching network-vif-plugged-f229c29a-a756-4bb0-bf5b-b3c27dbf082e {{(pid=61649) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1314.574558] env[61649]: WARNING nova.compute.manager [req-58ab9a01-e365-4b75-b948-d022b4746ebe req-b3ad18d3-b903-4fa0-8404-9e4acdcd174b service nova] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Received unexpected event network-vif-plugged-f229c29a-a756-4bb0-bf5b-b3c27dbf082e for instance with vm_state building and task_state spawning. [ 1314.574721] env[61649]: DEBUG nova.compute.manager [req-58ab9a01-e365-4b75-b948-d022b4746ebe req-b3ad18d3-b903-4fa0-8404-9e4acdcd174b service nova] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Received event network-changed-f229c29a-a756-4bb0-bf5b-b3c27dbf082e {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1314.574897] env[61649]: DEBUG nova.compute.manager [req-58ab9a01-e365-4b75-b948-d022b4746ebe req-b3ad18d3-b903-4fa0-8404-9e4acdcd174b service nova] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Refreshing instance network info cache due to event network-changed-f229c29a-a756-4bb0-bf5b-b3c27dbf082e. 
{{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 1314.575055] env[61649]: DEBUG oslo_concurrency.lockutils [req-58ab9a01-e365-4b75-b948-d022b4746ebe req-b3ad18d3-b903-4fa0-8404-9e4acdcd174b service nova] Acquiring lock "refresh_cache-aa39503a-2342-421e-928f-35ec7c8e47fb" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1314.575192] env[61649]: DEBUG oslo_concurrency.lockutils [req-58ab9a01-e365-4b75-b948-d022b4746ebe req-b3ad18d3-b903-4fa0-8404-9e4acdcd174b service nova] Acquired lock "refresh_cache-aa39503a-2342-421e-928f-35ec7c8e47fb" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1314.575346] env[61649]: DEBUG nova.network.neutron [req-58ab9a01-e365-4b75-b948-d022b4746ebe req-b3ad18d3-b903-4fa0-8404-9e4acdcd174b service nova] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Refreshing network info cache for port f229c29a-a756-4bb0-bf5b-b3c27dbf082e {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 1314.575816] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-58ab9a01-e365-4b75-b948-d022b4746ebe req-b3ad18d3-b903-4fa0-8404-9e4acdcd174b service nova] Expecting reply to msg e918677e1fb04321a3a5f7aa62963f16 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1314.582941] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e918677e1fb04321a3a5f7aa62963f16 [ 1314.685447] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158224, 'name': CreateVM_Task, 'duration_secs': 0.284369} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1314.685626] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Created VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1314.686303] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1314.686469] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1314.686788] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1314.687036] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-376a6ec0-5058-4658-b939-3329f128f9e0 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.693800] env[61649]: DEBUG oslo_vmware.api [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Waiting for the task: (returnval){
[ 1314.693800] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52c4cd83-36d2-b062-972a-a024e96eeefb"
[ 1314.693800] env[61649]: _type = "Task"
[ 1314.693800] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.703151] env[61649]: DEBUG oslo_vmware.api [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52c4cd83-36d2-b062-972a-a024e96eeefb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.847412] env[61649]: DEBUG nova.network.neutron [req-58ab9a01-e365-4b75-b948-d022b4746ebe req-b3ad18d3-b903-4fa0-8404-9e4acdcd174b service nova] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Updated VIF entry in instance network info cache for port f229c29a-a756-4bb0-bf5b-b3c27dbf082e. {{(pid=61649) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 1314.847853] env[61649]: DEBUG nova.network.neutron [req-58ab9a01-e365-4b75-b948-d022b4746ebe req-b3ad18d3-b903-4fa0-8404-9e4acdcd174b service nova] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Updating instance_info_cache with network_info: [{"id": "f229c29a-a756-4bb0-bf5b-b3c27dbf082e", "address": "fa:16:3e:ba:85:49", "network": {"id": "a42f700e-7bbf-46d9-a33a-c4d37e928c52", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1858676203-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2e0bd6f2d26e442f92498e358016a346", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf229c29a-a7", "ovs_interfaceid": "f229c29a-a756-4bb0-bf5b-b3c27dbf082e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1314.848411] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-58ab9a01-e365-4b75-b948-d022b4746ebe req-b3ad18d3-b903-4fa0-8404-9e4acdcd174b service nova] Expecting reply to msg 052615882d544073abaf3476be40eced in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1314.867380] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 052615882d544073abaf3476be40eced [ 1314.868054] env[61649]: DEBUG oslo_concurrency.lockutils [req-58ab9a01-e365-4b75-b948-d022b4746ebe req-b3ad18d3-b903-4fa0-8404-9e4acdcd174b service nova] Releasing lock "refresh_cache-aa39503a-2342-421e-928f-35ec7c8e47fb" {{(pid=61649) lock
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1314.928582] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1314.928952] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Starting heal instance info cache {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 1314.928952] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Rebuilding the list of instances to heal {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 1314.929518] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 670175e73c7d44e584db7a48d01bfe41 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1314.947240] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 670175e73c7d44e584db7a48d01bfe41 [ 1314.949595] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1314.949738] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1314.949871] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1314.949993] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1314.950112] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1314.950231] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1314.950372] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Skipping network cache update for instance because it is Building. 
{{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1314.950501] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1314.950618] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1314.950731] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1314.950846] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Didn't find any instances for network info cache update. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 1314.951315] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1315.204430] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1315.204900] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Processing image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1315.205140] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1316.929611] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1316.929935] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1316.930345] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None 
None] Expecting reply to msg 3373dbad26bc439eb309b719a277fcce in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1316.939390] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3373dbad26bc439eb309b719a277fcce [ 1316.940524] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1316.940746] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1316.940911] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1316.941066] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61649) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1316.942251] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b1ba5a2-8170-4ce6-886b-68fb689ec0e3 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.950827] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79392695-92bb-4c09-9499-01f03d732dbc {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.964228] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d13b4de-4536-4a40-86f9-bc09624cc239 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.970149] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d9d5e7c-f9fb-4765-8f8d-aa31e0e0d3f5 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.997825] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181720MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61649) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1316.997985] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1316.998157] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" 
acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1316.998977] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg e540968990c54d5e9c8090a739924592 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1317.033015] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e540968990c54d5e9c8090a739924592 [ 1317.037815] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 10ec4de5efe1455e9a3602f43d942aca in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1317.046304] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 10ec4de5efe1455e9a3602f43d942aca [ 1317.063618] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance bf2399eb-b2df-43b3-bddd-48692825c40a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1317.063883] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 6ab197e9-3e38-4b37-b625-c30b6977261a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1317.064124] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance c9fe1bfe-e813-43e9-9668-b813416ee27b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1317.064277] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 9cdd96c2-2837-4cb3-855c-ecad727dd5d4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1317.064402] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 5dc4bde6-db61-47c2-a2b8-d2a5515b1525 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1317.064545] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 5730229a-fd0c-4df1-9059-cd6ed39e954c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1317.064674] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 0b0050ff-2714-4068-9956-089c6aa3eff1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1317.064793] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 0fb0aaae-b6d2-418d-81a9-74671f4b97c6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1317.064907] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 5f67180f-6b27-4487-8858-5f57fcffd041 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1317.065019] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance aa39503a-2342-421e-928f-35ec7c8e47fb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1317.065569] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 2371c118363b466aaaf4763006bcb3e0 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1317.075827] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2371c118363b466aaaf4763006bcb3e0 [ 1317.076545] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance d6e8f17f-40c4-46e0-a900-d92d1da01ed8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1317.077054] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg e2cd7be8dd764f1ca9fc8081fbc62046 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1317.086128] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e2cd7be8dd764f1ca9fc8081fbc62046 [ 1317.086770] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance b6243867-9546-4663-9d48-5c040537490b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1317.087219] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 046b40d1496042e9bba5d61413615334 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1317.096914] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 046b40d1496042e9bba5d61413615334 [ 1317.097702] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 4b87e74a-2408-466f-b1c2-68330c31fb9d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1317.098160] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg be03335324bd410a90753d5bc57428b7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1317.107946] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg be03335324bd410a90753d5bc57428b7 [ 1317.108643] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance ff225293-ad72-499a-9b5b-147d0bc40350 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1317.109113] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg f6b591784b614aed970df214a3e7b901 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1317.118892] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f6b591784b614aed970df214a3e7b901 [ 1317.119623] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 36d37cda-b987-4c5a-8af1-6eede009e61e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1317.119844] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1317.119988] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1317.289854] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-027da219-3dd7-4414-85a7-8d5dcfe303b9 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.297703] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a3935a2-552f-4d6e-96f4-ac787a5ce85a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.327773] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8c20b98-d207-4002-a17c-b31a21e900ea {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.334407] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0523985-3edb-40c0-a612-8a7d6afd275e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.346592] env[61649]: DEBUG nova.compute.provider_tree [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1317.347050] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg d6d6dba49109495abe44d8807c31d019 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1317.358445] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d6d6dba49109495abe44d8807c31d019 [ 1317.359282] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1317.361502] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg e453c79f6ffd4256a470f59632d5ac36 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1317.371704] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e453c79f6ffd4256a470f59632d5ac36 
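The report-client record above carries the full inventory that placement uses to size this node. As an illustrative aside (a minimal sketch, not Nova or placement code; effective_capacity is our name, and the per-class dicts are trimmed to just the fields the arithmetic needs), placement's usual capacity rule is (total - reserved) * allocation_ratio per resource class, which for the figures logged here works out as follows:

    # Sketch: schedulable capacity from the inventory logged above.
    # Assumes placement's capacity rule (total - reserved) * allocation_ratio;
    # effective_capacity is an illustrative helper, not a Nova/placement API.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    def effective_capacity(inv):
        # One schedulable-capacity figure per resource class.
        return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
                for rc, v in inv.items()}

    print(effective_capacity(inventory))
    # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}

That headroom is consistent with the tracker's view above: 10 of 48 vCPUs allocated against a 4.0 allocation ratio leaves the node far from saturated.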
[ 1317.372341] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61649) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1317.372523] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.374s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1360.140811] env[61649]: WARNING oslo_vmware.rw_handles [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1360.140811] env[61649]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1360.140811] env[61649]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1360.140811] env[61649]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1360.140811] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1360.140811] env[61649]: ERROR oslo_vmware.rw_handles response.begin() [ 1360.140811] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1360.140811] env[61649]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1360.140811] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1360.140811] env[61649]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1360.140811] env[61649]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1360.140811] env[61649]: ERROR oslo_vmware.rw_handles [ 1360.141715] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Downloaded image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to vmware_temp/35b1c3ca-a20b-4219-9404-8d4161d8054e/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1360.142961] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Caching image {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1360.143197] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Copying Virtual Disk [datastore1] vmware_temp/35b1c3ca-a20b-4219-9404-8d4161d8054e/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk to [datastore1] 
vmware_temp/35b1c3ca-a20b-4219-9404-8d4161d8054e/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk {{(pid=61649) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1360.143494] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-87a6c739-2d56-40d4-a6f2-dde6083d1be6 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.152258] env[61649]: DEBUG oslo_vmware.api [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Waiting for the task: (returnval){
[ 1360.152258] env[61649]: value = "task-158225"
[ 1360.152258] env[61649]: _type = "Task"
[ 1360.152258] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.159801] env[61649]: DEBUG oslo_vmware.api [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Task: {'id': task-158225, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.662179] env[61649]: DEBUG oslo_vmware.exceptions [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Fault InvalidArgument not matched. {{(pid=61649) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1360.662497] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1360.663065] env[61649]: ERROR nova.compute.manager [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1360.663065] env[61649]: Faults: ['InvalidArgument'] [ 1360.663065] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Traceback (most recent call last): [ 1360.663065] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1360.663065] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] yield resources [ 1360.663065] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1360.663065] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] self.driver.spawn(context, instance, image_meta, [ 1360.663065] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1360.663065]
env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1360.663065] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1360.663065] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] self._fetch_image_if_missing(context, vi) [ 1360.663065] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1360.663369] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] image_cache(vi, tmp_image_ds_loc) [ 1360.663369] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1360.663369] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] vm_util.copy_virtual_disk( [ 1360.663369] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1360.663369] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] session._wait_for_task(vmdk_copy_task) [ 1360.663369] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1360.663369] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] return self.wait_for_task(task_ref) [ 1360.663369] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1360.663369] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] return evt.wait() [ 1360.663369] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1360.663369] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] result = hub.switch() [ 1360.663369] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1360.663369] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] return self.greenlet.switch() [ 1360.663670] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1360.663670] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] self.f(*self.args, **self.kw) [ 1360.663670] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1360.663670] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] raise exceptions.translate_fault(task_info.error) [ 1360.663670] env[61649]: ERROR nova.compute.manager [instance: 
bf2399eb-b2df-43b3-bddd-48692825c40a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1360.663670] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Faults: ['InvalidArgument'] [ 1360.663670] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] [ 1360.663670] env[61649]: INFO nova.compute.manager [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Terminating instance [ 1360.664965] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1360.665173] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1360.665412] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3a7fe9f0-f173-4f69-8058-e09eaf718859 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.667548] env[61649]: DEBUG nova.compute.manager [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Start destroying the instance on the hypervisor. 
{{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1360.667740] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1360.668453] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46687e5c-4820-4193-b3e1-40290565c883 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.675289] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Unregistering the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1360.675525] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4999f7bd-375d-4afd-bb46-2667d81bec0b {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.677741] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1360.677918] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61649) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1360.678916] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6d54699-e91b-45b7-bc29-352ed4ee465f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.683569] env[61649]: DEBUG oslo_vmware.api [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Waiting for the task: (returnval){
[ 1360.683569] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52ec8ee3-011d-7d6d-2861-9cebc3954c21"
[ 1360.683569] env[61649]: _type = "Task"
[ 1360.683569] env[61649]: } to complete.
{{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.697707] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Preparing fetch location {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1360.697958] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Creating directory with path [datastore1] vmware_temp/add41560-95ab-4b8b-b5f0-fdc070b155a7/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1360.698181] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f1f94d18-838f-4d72-81fb-5b6298d842e5 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.718212] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Created directory with path [datastore1] vmware_temp/add41560-95ab-4b8b-b5f0-fdc070b155a7/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1360.718477] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Fetch image to [datastore1] vmware_temp/add41560-95ab-4b8b-b5f0-fdc070b155a7/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1360.718655] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to [datastore1] vmware_temp/add41560-95ab-4b8b-b5f0-fdc070b155a7/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1360.719481] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bedc69d-369c-4972-a1bc-0a0f99e8718e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.726604] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b2e011c-a946-46ba-9824-2923538c14e6 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.736251] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53e4ee07-d616-4140-8450-e6397744a22e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.743177] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 
tempest-InstanceActionsV221TestJSON-118626687-project-member] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Unregistered the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1360.743407] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Deleting contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1360.743591] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Deleting the datastore file [datastore1] bf2399eb-b2df-43b3-bddd-48692825c40a {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1360.767225] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c273737b-ab37-4e38-88fd-fa9d68704969 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.769934] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff5faf1f-60ee-4865-b62b-b4f7bb3f2617 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.776988] env[61649]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-0641674b-c6a7-460e-81a2-8e07c6657afe {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.779382] env[61649]: DEBUG oslo_vmware.api [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Waiting for the task: (returnval){
[ 1360.779382] env[61649]: value = "task-158227"
[ 1360.779382] env[61649]: _type = "Task"
[ 1360.779382] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.788246] env[61649]: DEBUG oslo_vmware.api [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Task: {'id': task-158227, 'name': DeleteDatastoreFile_Task} progress is 0%.
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.799573] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1360.888541] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1360.889410] env[61649]: ERROR nova.compute.manager [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11. [ 1360.889410] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Traceback (most recent call last): [ 1360.889410] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1360.889410] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1360.889410] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1360.889410] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] result = getattr(controller, method)(*args, **kwargs) [ 1360.889410] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1360.889410] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] return self._get(image_id) [ 1360.889410] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1360.889410] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1360.889410] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1360.889762] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] resp, body = self.http_client.get(url, headers=header) [ 1360.889762] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1360.889762] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] return self.request(url, 'GET', **kwargs) [ 1360.889762] 
env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1360.889762] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] return self._handle_response(resp) [ 1360.889762] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1360.889762] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] raise exc.from_response(resp, resp.content) [ 1360.889762] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. [ 1360.889762] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] [ 1360.889762] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] During handling of the above exception, another exception occurred: [ 1360.889762] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] [ 1360.889762] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Traceback (most recent call last): [ 1360.890092] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1360.890092] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] yield resources [ 1360.890092] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1360.890092] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] self.driver.spawn(context, instance, image_meta, [ 1360.890092] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1360.890092] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1360.890092] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1360.890092] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] self._fetch_image_if_missing(context, vi) [ 1360.890092] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1360.890092] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] image_fetch(context, vi, tmp_image_ds_loc) [ 1360.890092] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1360.890092] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] 
images.fetch_image( [ 1360.890092] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1360.890479] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] metadata = IMAGE_API.get(context, image_ref) [ 1360.890479] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1360.890479] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] return session.show(context, image_id, [ 1360.890479] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1360.890479] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] _reraise_translated_image_exception(image_id) [ 1360.890479] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1360.890479] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] raise new_exc.with_traceback(exc_trace) [ 1360.890479] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1360.890479] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1360.890479] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1360.890479] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] result = getattr(controller, method)(*args, **kwargs) [ 1360.890479] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1360.890479] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] return self._get(image_id) [ 1360.890829] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1360.890829] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1360.890829] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1360.890829] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] resp, body = self.http_client.get(url, headers=header) [ 1360.890829] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1360.890829] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] return self.request(url, 'GET', **kwargs) [ 1360.890829] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1360.890829] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] return self._handle_response(resp) [ 1360.890829] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1360.890829] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] raise exc.from_response(resp, resp.content) [ 1360.890829] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] nova.exception.ImageNotAuthorized: Not authorized for image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11. [ 1360.890829] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] [ 1360.891147] env[61649]: INFO nova.compute.manager [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Terminating instance [ 1360.891276] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1360.891483] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1360.892104] env[61649]: DEBUG nova.compute.manager [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Start destroying the instance on the hypervisor. 
{{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1360.892294] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1360.892544] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2fb6296b-168a-46fc-95bc-9b9ce904eb14 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.894967] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fe2855c-111c-4179-8388-1d84e1d968b2 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.902051] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Unregistering the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1360.902298] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0e38394b-fbf8-4d20-8b5b-85add3d3af93 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.904414] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1360.904595] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61649) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1360.905519] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4f07108-13bd-49c6-b4f8-f33cb5969d92 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.909917] env[61649]: DEBUG oslo_vmware.api [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Waiting for the task: (returnval){ [ 1360.909917] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52e0d82c-ed31-56d3-372c-34edd1e93aeb" [ 1360.909917] env[61649]: _type = "Task" [ 1360.909917] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.916761] env[61649]: DEBUG oslo_vmware.api [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52e0d82c-ed31-56d3-372c-34edd1e93aeb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.955350] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Unregistered the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1360.955574] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Deleting contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1360.955826] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Deleting the datastore file [datastore1] 6ab197e9-3e38-4b37-b625-c30b6977261a {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1360.956035] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-76b1fca2-c111-4608-a8f3-74516293cc14 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.961433] env[61649]: DEBUG oslo_vmware.api [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Waiting for the task: (returnval){ [ 1360.961433] env[61649]: value = "task-158229" [ 1360.961433] env[61649]: _type = "Task" [ 1360.961433] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.968472] env[61649]: DEBUG oslo_vmware.api [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Task: {'id': task-158229, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.288529] env[61649]: DEBUG oslo_vmware.api [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Task: {'id': task-158227, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.070514} completed successfully. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.288811] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Deleted the datastore file {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1361.288951] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Deleted contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1361.289125] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1361.289316] env[61649]: INFO nova.compute.manager [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Took 0.62 seconds to destroy the instance on the hypervisor. [ 1361.291456] env[61649]: DEBUG nova.compute.claims [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Aborting claim: {{(pid=61649) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1361.291665] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1361.291853] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1361.293668] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Expecting reply to msg aa69d242ae9d41148085bcae4db7446a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1361.325832] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aa69d242ae9d41148085bcae4db7446a [ 1361.421063] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Preparing fetch location {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} 
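The "Waiting for the task ... progress is 0% ... completed successfully" records above (SearchDatastore_Task, DeleteDatastoreFile_Task) come from oslo.vmware's task polling: Nova submits a vCenter task and a looping call polls it until it reports success or a fault. The following is a minimal, self-contained sketch of that pattern, not the oslo.vmware implementation; FakeSession and get_task_info() are stand-ins for the real VMwareAPISession and its PropertyCollector round-trips.

import time
from dataclasses import dataclass


@dataclass
class TaskInfo:
    state: str            # "running", "success", or "error"
    progress: int = 0
    result: object = None
    error_message: str = ""


class FakeSession:
    """Stand-in for the real VMwareAPISession; finishes after two polls."""
    def __init__(self):
        self._polls = 0

    def get_task_info(self, task_ref):
        self._polls += 1
        if self._polls < 3:
            return TaskInfo(state="running", progress=0)
        return TaskInfo(state="success", result="ok")


def wait_for_task(session, task_ref, poll_interval=0.1):
    """Poll until the task succeeds or faults, echoing the shape of
    oslo.vmware's wait_for_task()/_poll_task() loop seen in this log."""
    while True:
        info = session.get_task_info(task_ref)
        if info.state == "running":
            print(f"Task: {task_ref} progress is {info.progress}%.")
        elif info.state == "success":
            print(f"Task: {task_ref} completed successfully.")
            return info.result
        else:
            # oslo.vmware raises a translated fault here, e.g. the
            # VimFaultException ("InvalidArgument: fileType") in this log
            raise RuntimeError(info.error_message)
        time.sleep(poll_interval)


wait_for_task(FakeSession(), "task-158229")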
[ 1361.421314] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Creating directory with path [datastore1] vmware_temp/88c5ed7a-2bb2-46a9-bb82-3029e495badb/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1361.421536] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ddeaa307-c814-4b03-b456-187ac4321f35 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.433660] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Created directory with path [datastore1] vmware_temp/88c5ed7a-2bb2-46a9-bb82-3029e495badb/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1361.433858] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Fetch image to [datastore1] vmware_temp/88c5ed7a-2bb2-46a9-bb82-3029e495badb/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1361.434027] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to [datastore1] vmware_temp/88c5ed7a-2bb2-46a9-bb82-3029e495badb/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1361.434739] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3c9424f-4c00-4f97-9a76-ff65300b8ac9 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.441282] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58a3b9c4-3702-4148-9065-b46517aaebb1 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.452241] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87a36773-04e7-4b33-b730-65f03b97e9df {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.490009] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-039477a8-9e2c-4f59-b8fc-7e86218220ba {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.496713] env[61649]: DEBUG oslo_vmware.api [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Task: {'id': task-158229, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.069825} 
completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.498098] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Deleted the datastore file {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1361.498282] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Deleted contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1361.498451] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1361.498622] env[61649]: INFO nova.compute.manager [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Took 0.61 seconds to destroy the instance on the hypervisor. [ 1361.500470] env[61649]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-bad98581-2910-4ec1-bb62-d1b2d1144efc {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.502264] env[61649]: DEBUG nova.compute.claims [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Aborting claim: {{(pid=61649) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1361.502435] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1361.522661] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1361.536019] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f8f83c9-2b49-4e73-8bfe-e7ee1634844e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.545058] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be2eafbc-729a-49b0-9308-99e5f603d31c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.577965] env[61649]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48448eac-a1d7-462c-981f-84c14449e16d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.584682] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d13c8f8d-56ab-46fc-90cd-4ade55b9e660 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.597267] env[61649]: DEBUG nova.compute.provider_tree [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1361.597755] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Expecting reply to msg 760b45ca503a48c286e281133d5a4a85 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1361.599163] env[61649]: DEBUG oslo_vmware.rw_handles [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/88c5ed7a-2bb2-46a9-bb82-3029e495badb/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1361.657810] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 760b45ca503a48c286e281133d5a4a85 [ 1361.658957] env[61649]: DEBUG nova.scheduler.client.report [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1361.661530] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Expecting reply to msg 58faf44b03f54ea7a530c94c3f36afba in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1361.664510] env[61649]: DEBUG oslo_vmware.rw_handles [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Completed reading data from the image iterator. 
{{(pid=61649) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1361.664697] env[61649]: DEBUG oslo_vmware.rw_handles [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/88c5ed7a-2bb2-46a9-bb82-3029e495badb/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1361.672420] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 58faf44b03f54ea7a530c94c3f36afba [ 1361.673619] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.382s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1361.674431] env[61649]: ERROR nova.compute.manager [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1361.674431] env[61649]: Faults: ['InvalidArgument'] [ 1361.674431] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Traceback (most recent call last): [ 1361.674431] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1361.674431] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] self.driver.spawn(context, instance, image_meta, [ 1361.674431] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1361.674431] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1361.674431] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1361.674431] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] self._fetch_image_if_missing(context, vi) [ 1361.674431] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1361.674431] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] image_cache(vi, tmp_image_ds_loc) [ 1361.674431] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1361.674844] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] vm_util.copy_virtual_disk( [ 1361.674844] env[61649]: ERROR nova.compute.manager [instance: 
bf2399eb-b2df-43b3-bddd-48692825c40a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1361.674844] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] session._wait_for_task(vmdk_copy_task) [ 1361.674844] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1361.674844] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] return self.wait_for_task(task_ref) [ 1361.674844] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1361.674844] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] return evt.wait() [ 1361.674844] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1361.674844] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] result = hub.switch() [ 1361.674844] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1361.674844] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] return self.greenlet.switch() [ 1361.674844] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1361.674844] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] self.f(*self.args, **self.kw) [ 1361.675187] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1361.675187] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] raise exceptions.translate_fault(task_info.error) [ 1361.675187] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1361.675187] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Faults: ['InvalidArgument'] [ 1361.675187] env[61649]: ERROR nova.compute.manager [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] [ 1361.675322] env[61649]: DEBUG nova.compute.utils [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] VimFaultException {{(pid=61649) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1361.676521] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.174s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1361.678427] env[61649]: INFO 
oslo_messaging._drivers.amqpdriver [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Expecting reply to msg ed0ab1f358a64664847415105c628c32 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1361.679912] env[61649]: DEBUG nova.compute.manager [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Build of instance bf2399eb-b2df-43b3-bddd-48692825c40a was re-scheduled: A specified parameter was not correct: fileType [ 1361.679912] env[61649]: Faults: ['InvalidArgument'] {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1361.680360] env[61649]: DEBUG nova.compute.manager [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Unplugging VIFs for instance {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1361.680533] env[61649]: DEBUG nova.compute.manager [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1361.680679] env[61649]: DEBUG nova.compute.manager [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1361.680875] env[61649]: DEBUG nova.network.neutron [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1361.707936] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ed0ab1f358a64664847415105c628c32 [ 1361.897377] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b93338bc-ad9a-47a4-b7da-b54fe8448506 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.904959] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06d36cf1-6598-4d25-9224-7592d5a8d0ce {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.953394] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61070e76-0262-4b70-9d3c-c58fd2084713 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.960510] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Expecting reply to msg 
5b9c2dede1574ebba6403b78668795be in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1361.962951] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9ac2741-c873-4225-8da1-73f77cb4bab8 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.967907] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5b9c2dede1574ebba6403b78668795be [ 1361.968541] env[61649]: DEBUG nova.network.neutron [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1361.969045] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Expecting reply to msg 42c9ee2dfebf4e6491e25225a7d443cb in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1361.978741] env[61649]: DEBUG nova.compute.provider_tree [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1361.979188] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Expecting reply to msg d32ad505231843f8ac6753328efbce94 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1361.981724] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 42c9ee2dfebf4e6491e25225a7d443cb [ 1361.982714] env[61649]: INFO nova.compute.manager [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Took 0.30 seconds to deallocate network for instance. 
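The paired Glance tracebacks in this section (first at 1360.889, repeated at 1361.999) are produced by a translate-and-reraise pattern: nova/image/glance.py catches the client-level glanceclient.exc.HTTPUnauthorized, maps it to nova.exception.ImageNotAuthorized, and re-raises it with the original traceback attached, which is why the log prints both exceptions joined by "During handling of the above exception, another exception occurred". Below is a simplified, runnable sketch of that mechanism; the exception classes are stand-ins, not the real Nova or glanceclient classes.

import sys
import traceback


class HTTPUnauthorized(Exception):
    """Plays the role of glanceclient.exc.HTTPUnauthorized."""


class ImageNotAuthorized(Exception):
    """Plays the role of nova.exception.ImageNotAuthorized."""
    def __init__(self, image_id):
        super().__init__(f"Not authorized for image {image_id}.")


def _reraise_translated_image_exception(image_id):
    """Translate the client error while keeping the original traceback,
    so the log shows both the 401 and the Nova-level exception."""
    _exc_type, _exc_value, exc_trace = sys.exc_info()
    new_exc = ImageNotAuthorized(image_id)
    raise new_exc.with_traceback(exc_trace)


def show(image_id):
    try:
        # Stands in for the REST call that drew the HTTP 401.
        raise HTTPUnauthorized("HTTP 401 Unauthorized")
    except HTTPUnauthorized:
        _reraise_translated_image_exception(image_id)


if __name__ == "__main__":
    try:
        show("d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11")
    except ImageNotAuthorized:
        traceback.print_exc()  # prints both exceptions, chained, as in the log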
[ 1361.984486] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Expecting reply to msg ab35a2ea2b2145b1a110f71699d1efb5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1361.985947] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d32ad505231843f8ac6753328efbce94 [ 1361.986747] env[61649]: DEBUG nova.scheduler.client.report [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1361.988944] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Expecting reply to msg 4125a204825640d7802aea17a0fccc9a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1361.998476] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4125a204825640d7802aea17a0fccc9a [ 1361.999144] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.323s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1361.999863] env[61649]: ERROR nova.compute.manager [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11. 
[ 1361.999863] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Traceback (most recent call last): [ 1361.999863] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1361.999863] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1361.999863] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1361.999863] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] result = getattr(controller, method)(*args, **kwargs) [ 1361.999863] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1361.999863] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] return self._get(image_id) [ 1361.999863] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1361.999863] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1361.999863] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1362.000201] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] resp, body = self.http_client.get(url, headers=header) [ 1362.000201] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1362.000201] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] return self.request(url, 'GET', **kwargs) [ 1362.000201] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1362.000201] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] return self._handle_response(resp) [ 1362.000201] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1362.000201] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] raise exc.from_response(resp, resp.content) [ 1362.000201] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1362.000201] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] [ 1362.000201] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] During handling of the above exception, another exception occurred: [ 1362.000201] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] [ 1362.000201] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Traceback (most recent call last): [ 1362.000507] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1362.000507] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] self.driver.spawn(context, instance, image_meta, [ 1362.000507] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1362.000507] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1362.000507] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1362.000507] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] self._fetch_image_if_missing(context, vi) [ 1362.000507] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1362.000507] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] image_fetch(context, vi, tmp_image_ds_loc) [ 1362.000507] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1362.000507] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] images.fetch_image( [ 1362.000507] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1362.000507] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] metadata = IMAGE_API.get(context, image_ref) [ 1362.000507] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1362.000855] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] return session.show(context, image_id, [ 1362.000855] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1362.000855] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] _reraise_translated_image_exception(image_id) [ 1362.000855] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1362.000855] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] raise new_exc.with_traceback(exc_trace) [ 1362.000855] env[61649]: ERROR nova.compute.manager [instance: 
6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1362.000855] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1362.000855] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1362.000855] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] result = getattr(controller, method)(*args, **kwargs) [ 1362.000855] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1362.000855] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] return self._get(image_id) [ 1362.000855] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1362.000855] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1362.001199] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1362.001199] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] resp, body = self.http_client.get(url, headers=header) [ 1362.001199] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1362.001199] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] return self.request(url, 'GET', **kwargs) [ 1362.001199] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1362.001199] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] return self._handle_response(resp) [ 1362.001199] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1362.001199] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] raise exc.from_response(resp, resp.content) [ 1362.001199] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] nova.exception.ImageNotAuthorized: Not authorized for image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11. [ 1362.001199] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] [ 1362.001199] env[61649]: DEBUG nova.compute.utils [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Not authorized for image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11. 
{{(pid=61649) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1362.001925] env[61649]: DEBUG nova.compute.manager [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Build of instance 6ab197e9-3e38-4b37-b625-c30b6977261a was re-scheduled: Not authorized for image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11. {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1362.002387] env[61649]: DEBUG nova.compute.manager [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Unplugging VIFs for instance {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1362.002562] env[61649]: DEBUG nova.compute.manager [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1362.002948] env[61649]: DEBUG nova.compute.manager [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1362.002948] env[61649]: DEBUG nova.network.neutron [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1362.014276] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ab35a2ea2b2145b1a110f71699d1efb5 [ 1362.017144] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Expecting reply to msg bb09c7a90f514a55815a8881d0a6236e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1362.047581] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bb09c7a90f514a55815a8881d0a6236e [ 1362.068446] env[61649]: INFO nova.scheduler.client.report [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Deleted allocations for instance bf2399eb-b2df-43b3-bddd-48692825c40a [ 1362.074733] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Expecting reply to msg 173379e551fc49d8920f1e7a7d36e768 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1362.086951] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 173379e551fc49d8920f1e7a7d36e768 [ 1362.087514] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8f075caa-e2c8-4793-9fd1-4830ba42cb59 tempest-InstanceActionsV221TestJSON-118626687 
tempest-InstanceActionsV221TestJSON-118626687-project-member] Lock "bf2399eb-b2df-43b3-bddd-48692825c40a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 583.674s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1362.088074] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Expecting reply to msg d9748391f889436480c83ff017438db2 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1362.089172] env[61649]: DEBUG oslo_concurrency.lockutils [None req-aaf3d23c-6358-46ed-a5d2-ae2ae86e790f tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Lock "bf2399eb-b2df-43b3-bddd-48692825c40a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 387.670s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1362.089407] env[61649]: DEBUG oslo_concurrency.lockutils [None req-aaf3d23c-6358-46ed-a5d2-ae2ae86e790f tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Acquiring lock "bf2399eb-b2df-43b3-bddd-48692825c40a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1362.089622] env[61649]: DEBUG oslo_concurrency.lockutils [None req-aaf3d23c-6358-46ed-a5d2-ae2ae86e790f tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Lock "bf2399eb-b2df-43b3-bddd-48692825c40a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1362.089787] env[61649]: DEBUG oslo_concurrency.lockutils [None req-aaf3d23c-6358-46ed-a5d2-ae2ae86e790f tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Lock "bf2399eb-b2df-43b3-bddd-48692825c40a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1362.092464] env[61649]: INFO nova.compute.manager [None req-aaf3d23c-6358-46ed-a5d2-ae2ae86e790f tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Terminating instance [ 1362.093402] env[61649]: DEBUG nova.compute.manager [None req-aaf3d23c-6358-46ed-a5d2-ae2ae86e790f tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Start destroying the instance on the hypervisor. 
{{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1362.093595] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-aaf3d23c-6358-46ed-a5d2-ae2ae86e790f tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1362.094056] env[61649]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4abc527d-c97e-4a41-a89d-e0366fbe19d2 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.096650] env[61649]: DEBUG neutronclient.v2_0.client [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61649) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1362.098228] env[61649]: ERROR nova.compute.manager [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. [ 1362.098228] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Traceback (most recent call last): [ 1362.098228] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1362.098228] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1362.098228] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1362.098228] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] result = getattr(controller, method)(*args, **kwargs) [ 1362.098228] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1362.098228] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] return self._get(image_id) [ 1362.098228] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1362.098228] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1362.098228] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1362.098547] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] resp, body = self.http_client.get(url, headers=header) [ 1362.098547] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1362.098547] env[61649]: ERROR 
nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] return self.request(url, 'GET', **kwargs) [ 1362.098547] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1362.098547] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] return self._handle_response(resp) [ 1362.098547] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1362.098547] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] raise exc.from_response(resp, resp.content) [ 1362.098547] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. [ 1362.098547] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] [ 1362.098547] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] During handling of the above exception, another exception occurred: [ 1362.098547] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] [ 1362.098547] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Traceback (most recent call last): [ 1362.098897] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1362.098897] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] self.driver.spawn(context, instance, image_meta, [ 1362.098897] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1362.098897] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1362.098897] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1362.098897] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] self._fetch_image_if_missing(context, vi) [ 1362.098897] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1362.098897] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] image_fetch(context, vi, tmp_image_ds_loc) [ 1362.098897] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1362.098897] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] images.fetch_image( [ 1362.098897] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 
251, in fetch_image [ 1362.098897] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] metadata = IMAGE_API.get(context, image_ref) [ 1362.098897] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1362.099242] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] return session.show(context, image_id, [ 1362.099242] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1362.099242] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] _reraise_translated_image_exception(image_id) [ 1362.099242] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1362.099242] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] raise new_exc.with_traceback(exc_trace) [ 1362.099242] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1362.099242] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1362.099242] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1362.099242] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] result = getattr(controller, method)(*args, **kwargs) [ 1362.099242] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1362.099242] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] return self._get(image_id) [ 1362.099242] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1362.099242] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1362.099611] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1362.099611] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] resp, body = self.http_client.get(url, headers=header) [ 1362.099611] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1362.099611] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] return self.request(url, 'GET', **kwargs) [ 1362.099611] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1362.099611] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] return self._handle_response(resp) [ 1362.099611] 
env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1362.099611] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] raise exc.from_response(resp, resp.content) [ 1362.099611] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] nova.exception.ImageNotAuthorized: Not authorized for image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11. [ 1362.099611] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] [ 1362.099611] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] During handling of the above exception, another exception occurred: [ 1362.099611] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] [ 1362.099611] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Traceback (most recent call last): [ 1362.099947] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/compute/manager.py", line 2447, in _do_build_and_run_instance [ 1362.099947] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] self._build_and_run_instance(context, instance, image, [ 1362.099947] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/compute/manager.py", line 2739, in _build_and_run_instance [ 1362.099947] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] raise exception.RescheduledException( [ 1362.099947] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] nova.exception.RescheduledException: Build of instance 6ab197e9-3e38-4b37-b625-c30b6977261a was re-scheduled: Not authorized for image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11. 
[ 1362.099947] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] [ 1362.099947] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] During handling of the above exception, another exception occurred: [ 1362.099947] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] [ 1362.099947] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Traceback (most recent call last): [ 1362.099947] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1362.099947] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] ret = obj(*args, **kwargs) [ 1362.099947] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1362.099947] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] exception_handler_v20(status_code, error_body) [ 1362.100372] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1362.100372] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] raise client_exc(message=error_message, [ 1362.100372] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1362.100372] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Neutron server returns request_ids: ['req-b897cef9-51ab-4d2e-bfdc-c839f2a2ee75'] [ 1362.100372] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] [ 1362.100372] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] During handling of the above exception, another exception occurred: [ 1362.100372] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] [ 1362.100372] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Traceback (most recent call last): [ 1362.100372] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/compute/manager.py", line 3036, in _cleanup_allocated_networks [ 1362.100372] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] self._deallocate_network(context, instance, requested_networks) [ 1362.100372] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1362.100372] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] self.network_api.deallocate_for_instance( [ 1362.100372] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/network/neutron.py", line 1805, in deallocate_for_instance [ 1362.100729] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] data = neutron.list_ports(**search_opts) [ 
1362.100729] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1362.100729] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] ret = obj(*args, **kwargs) [ 1362.100729] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1362.100729] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] return self.list('ports', self.ports_path, retrieve_all, [ 1362.100729] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1362.100729] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] ret = obj(*args, **kwargs) [ 1362.100729] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1362.100729] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] for r in self._pagination(collection, path, **params): [ 1362.100729] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1362.100729] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] res = self.get(path, params=params) [ 1362.100729] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1362.100729] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] ret = obj(*args, **kwargs) [ 1362.101062] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1362.101062] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] return self.retry_request("GET", action, body=body, [ 1362.101062] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1362.101062] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] ret = obj(*args, **kwargs) [ 1362.101062] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1362.101062] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] return self.do_request(method, action, body=body, [ 1362.101062] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1362.101062] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] ret = obj(*args, **kwargs) [ 1362.101062] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 
1362.101062] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] self._handle_fault_response(status_code, replybody, resp) [ 1362.101062] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 1362.101062] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] raise exception.Unauthorized() [ 1362.101062] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] nova.exception.Unauthorized: Not authorized. [ 1362.101370] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] [ 1362.101370] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Expecting reply to msg 24f0b2b4d770452aa6bd95ea50c00156 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1362.109818] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-702b61fb-7d70-4733-be9f-fd36718991b0 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.120509] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d9748391f889436480c83ff017438db2 [ 1362.121018] env[61649]: DEBUG nova.compute.manager [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1362.122597] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Expecting reply to msg 51fbe11e4f9f43aaaacd944f4cf699c4 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1362.141609] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 24f0b2b4d770452aa6bd95ea50c00156 [ 1362.142098] env[61649]: WARNING nova.virt.vmwareapi.vmops [None req-aaf3d23c-6358-46ed-a5d2-ae2ae86e790f tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance bf2399eb-b2df-43b3-bddd-48692825c40a could not be found. [ 1362.142284] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-aaf3d23c-6358-46ed-a5d2-ae2ae86e790f tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1362.142461] env[61649]: INFO nova.compute.manager [None req-aaf3d23c-6358-46ed-a5d2-ae2ae86e790f tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Took 0.05 seconds to destroy the instance on the hypervisor. 
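The build failure recorded above for instance 6ab197e9-3e38-4b37-b625-c30b6977261a chains three translations: glanceclient's HTTPUnauthorized is re-raised by nova/image/glance.py as ImageNotAuthorized (note the "raise new_exc.with_traceback(exc_trace)" frame), that aborts the spawn as a RescheduledException, and the subsequent network cleanup then fails with its own Unauthorized. A minimal sketch of the first translation step, using stand-in exception classes (the real ones live in glanceclient.exc and nova.exception):

import sys

class HTTPUnauthorized(Exception):
    """Stand-in for glanceclient.exc.HTTPUnauthorized."""

class ImageNotAuthorized(Exception):
    """Stand-in for nova.exception.ImageNotAuthorized."""
    def __init__(self, image_id):
        super().__init__("Not authorized for image %s." % image_id)

def _reraise_translated_image_exception(image_id):
    # Translate the exception currently being handled, keeping its traceback
    # so the log still shows the glanceclient frames, as in the dump above.
    _exc_type, exc_value, exc_trace = sys.exc_info()
    if isinstance(exc_value, HTTPUnauthorized):
        raise ImageNotAuthorized(image_id).with_traceback(exc_trace)
    raise  # anything unrecognized propagates unchanged

def show(image_id):
    try:
        raise HTTPUnauthorized("HTTP 401 Unauthorized")  # simulated client failure
    except Exception:
        _reraise_translated_image_exception(image_id)

try:
    show("d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11")
except ImageNotAuthorized as exc:
    print(exc)  # -> Not authorized for image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.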
[ 1362.142710] env[61649]: DEBUG oslo.service.loopingcall [None req-aaf3d23c-6358-46ed-a5d2-ae2ae86e790f tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1362.145503] env[61649]: DEBUG nova.compute.manager [-] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1362.145608] env[61649]: DEBUG nova.network.neutron [-] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1362.163723] env[61649]: INFO nova.scheduler.client.report [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Deleted allocations for instance 6ab197e9-3e38-4b37-b625-c30b6977261a [ 1362.169679] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 51fbe11e4f9f43aaaacd944f4cf699c4 [ 1362.172252] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Expecting reply to msg ed8d7ee4d71f4e02bc0a4500a5d3e6f4 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1362.177019] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg f706bd5f4a9c47e0b5b70bb6ac0b6928 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1362.185855] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1362.186228] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1362.188243] env[61649]: INFO nova.compute.claims [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1362.189944] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Expecting reply to msg 8625fc78833e4361beb0a03842d21751 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1362.191273] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ed8d7ee4d71f4e02bc0a4500a5d3e6f4 [ 1362.191793] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f706bd5f4a9c47e0b5b70bb6ac0b6928 [ 1362.192493] env[61649]:
DEBUG oslo_concurrency.lockutils [None req-fa6b84e4-c663-4c46-bf1e-0f5c20e99f80 tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Lock "6ab197e9-3e38-4b37-b625-c30b6977261a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 572.420s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1362.192936] env[61649]: DEBUG nova.network.neutron [-] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1362.193367] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg c7d4f4b71bd0481c834ad988e2b42361 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1362.194441] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Expecting reply to msg 8cf4ad0308ae40dbb68784d8d1316365 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1362.194895] env[61649]: DEBUG oslo_concurrency.lockutils [None req-0b3639e2-e033-47ea-8483-3cbcdc0292fb tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Lock "6ab197e9-3e38-4b37-b625-c30b6977261a" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 376.379s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1362.195230] env[61649]: DEBUG oslo_concurrency.lockutils [None req-0b3639e2-e033-47ea-8483-3cbcdc0292fb tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Acquiring lock "6ab197e9-3e38-4b37-b625-c30b6977261a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1362.195541] env[61649]: DEBUG oslo_concurrency.lockutils [None req-0b3639e2-e033-47ea-8483-3cbcdc0292fb tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Lock "6ab197e9-3e38-4b37-b625-c30b6977261a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1362.195915] env[61649]: DEBUG oslo_concurrency.lockutils [None req-0b3639e2-e033-47ea-8483-3cbcdc0292fb tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Lock "6ab197e9-3e38-4b37-b625-c30b6977261a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1362.198251] env[61649]: INFO nova.compute.manager [None req-0b3639e2-e033-47ea-8483-3cbcdc0292fb tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Terminating instance [ 1362.200537] env[61649]: DEBUG nova.compute.manager [None req-0b3639e2-e033-47ea-8483-3cbcdc0292fb tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Start
destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1362.200739] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-0b3639e2-e033-47ea-8483-3cbcdc0292fb tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1362.201189] env[61649]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-322f7b20-48bb-4cd3-8a61-f72d371ffc39 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.203440] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c7d4f4b71bd0481c834ad988e2b42361 [ 1362.206227] env[61649]: INFO nova.compute.manager [-] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] Took 0.06 seconds to deallocate network for instance. [ 1362.209459] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-aaf3d23c-6358-46ed-a5d2-ae2ae86e790f tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Expecting reply to msg 2276487b6260462884646f173bead198 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1362.214746] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c52a5cf-0f92-4821-ba20-5bc7e6faf417 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.225969] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8cf4ad0308ae40dbb68784d8d1316365 [ 1362.225969] env[61649]: DEBUG nova.compute.manager [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] [instance: b6243867-9546-4663-9d48-5c040537490b] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1362.227753] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Expecting reply to msg e70f43fc6ded4705b44a960abfd3ea87 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1362.228798] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8625fc78833e4361beb0a03842d21751 [ 1362.230711] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Expecting reply to msg 8cefdb6c06da4f0386d6c3a89b9d81e5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1362.245973] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8cefdb6c06da4f0386d6c3a89b9d81e5 [ 1362.246463] env[61649]: WARNING nova.virt.vmwareapi.vmops [None req-0b3639e2-e033-47ea-8483-3cbcdc0292fb tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6ab197e9-3e38-4b37-b625-c30b6977261a could not be found. 
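The WARNING above shows the VMware destroy path hitting a VM that no longer exists in vCenter; as the next DEBUG line records, vmops treats InstanceNotFound from the backend lookup as "already destroyed" and continues with network and resource cleanup rather than failing the terminate. A rough sketch of that tolerance, with illustrative names standing in for the vmops/SearchIndex machinery:

class InstanceNotFound(Exception):
    """Stand-in for nova.exception.InstanceNotFound."""

def find_vm_ref(uuid):
    # Illustrative lookup; vmops raises InstanceNotFound when the
    # SearchIndex.FindAllByUuid query returns no matching reference.
    raise InstanceNotFound("Instance %s could not be found." % uuid)

def destroy(uuid):
    try:
        vm_ref = find_vm_ref(uuid)
    except InstanceNotFound as exc:
        print("WARNING: Instance does not exist on backend: %s" % exc)
        return  # treat as already destroyed; caller proceeds to deallocate networks
    # ... otherwise power off and unregister vm_ref here ...

destroy("6ab197e9-3e38-4b37-b625-c30b6977261a")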
[ 1362.246652] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-0b3639e2-e033-47ea-8483-3cbcdc0292fb tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1362.246828] env[61649]: INFO nova.compute.manager [None req-0b3639e2-e033-47ea-8483-3cbcdc0292fb tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1362.247065] env[61649]: DEBUG oslo.service.loopingcall [None req-0b3639e2-e033-47ea-8483-3cbcdc0292fb tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1362.249826] env[61649]: DEBUG nova.compute.manager [-] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1362.249933] env[61649]: DEBUG nova.network.neutron [-] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1362.256802] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2276487b6260462884646f173bead198 [ 1362.260197] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e70f43fc6ded4705b44a960abfd3ea87 [ 1362.271324] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-aaf3d23c-6358-46ed-a5d2-ae2ae86e790f tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Expecting reply to msg fe84387521894409a35cb85a92ddee45 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1362.276067] env[61649]: DEBUG oslo_concurrency.lockutils [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1362.309239] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fe84387521894409a35cb85a92ddee45 [ 1362.312683] env[61649]: DEBUG oslo_concurrency.lockutils [None req-aaf3d23c-6358-46ed-a5d2-ae2ae86e790f tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Lock "bf2399eb-b2df-43b3-bddd-48692825c40a" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.223s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1362.312768] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-aaf3d23c-6358-46ed-a5d2-ae2ae86e790f tempest-InstanceActionsV221TestJSON-118626687 tempest-InstanceActionsV221TestJSON-118626687-project-member] Expecting reply to msg c6d65bfd8a6f4e62b1d3591ff0c5b778 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1362.313380] env[61649]: DEBUG oslo_concurrency.lockutils [None
req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "bf2399eb-b2df-43b3-bddd-48692825c40a" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 234.285s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1362.313887] env[61649]: INFO nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: bf2399eb-b2df-43b3-bddd-48692825c40a] During sync_power_state the instance has a pending task (deleting). Skip. [ 1362.313887] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "bf2399eb-b2df-43b3-bddd-48692825c40a" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1362.322928] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c6d65bfd8a6f4e62b1d3591ff0c5b778 [ 1362.383270] env[61649]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61649) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1362.383550] env[61649]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1362.384125] env[61649]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.<locals>._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception.
[ 1362.384125] env[61649]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1362.384125] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1362.384125] env[61649]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1362.384125] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1362.384125] env[61649]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1362.384125] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1362.384125] env[61649]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1362.384125] env[61649]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1362.384125] env[61649]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-056acf41-4174-4941-9c7b-ea463c6d0c3b'] [ 1362.384125] env[61649]: ERROR oslo.service.loopingcall [ 1362.384125] env[61649]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1362.384125] env[61649]: ERROR oslo.service.loopingcall [ 1362.384125] env[61649]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1362.384125] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1362.384125] env[61649]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1362.384507] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1362.384507] env[61649]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1362.384507] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1362.384507] env[61649]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1362.384507] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1362.384507] env[61649]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1362.384507] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1805, in deallocate_for_instance [ 1362.384507] env[61649]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1362.384507] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1362.384507] env[61649]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1362.384507] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1362.384507] env[61649]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1362.384507] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1362.384507] env[61649]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1362.384507] env[61649]: ERROR 
oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1362.384507] env[61649]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1362.384507] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1362.384507] env[61649]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1362.384899] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1362.384899] env[61649]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1362.384899] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1362.384899] env[61649]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1362.384899] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1362.384899] env[61649]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1362.384899] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1362.384899] env[61649]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1362.384899] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1362.384899] env[61649]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1362.384899] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1362.384899] env[61649]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1362.384899] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1362.384899] env[61649]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1362.384899] env[61649]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1362.384899] env[61649]: ERROR oslo.service.loopingcall [ 1362.385314] env[61649]: ERROR nova.compute.manager [None req-0b3639e2-e033-47ea-8483-3cbcdc0292fb tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
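This second failure is the admin-credential variant of the earlier 401: the wrapper in nova/network/neutron.py (the "wrapper" frames at lines 196/212 above) distinguishes a rejected user token, surfaced as plain Unauthorized, from a rejected admin token, surfaced as NeutronAdminCredentialConfigurationInvalid because it implicates the [neutron] credentials in nova.conf. A hedged sketch of that branching; the exception classes are stand-ins, and the real check inspects the client's auth context rather than a boolean flag:

import functools

class NeutronClientUnauthorized(Exception):
    """Stand-in for neutronclient.common.exceptions.Unauthorized."""

class Unauthorized(Exception):
    """Stand-in for nova.exception.Unauthorized."""

class NeutronAdminCredentialConfigurationInvalid(Exception):
    """Stand-in for the nova.exception class of the same name."""

def translate_neutron_auth_errors(is_admin_client):
    # Decorator: convert a 401 from the Neutron client into the matching
    # Nova-level exception, as the traceback frames above suggest.
    def decorator(fn):
        @functools.wraps(fn)
        def wrapper(*args, **kwargs):
            try:
                return fn(*args, **kwargs)
            except NeutronClientUnauthorized:
                if is_admin_client:
                    # The token came from nova.conf, so this is an operator
                    # configuration problem, not a tenant permission problem.
                    raise NeutronAdminCredentialConfigurationInvalid()
                raise Unauthorized()
        return wrapper
    return decorator

@translate_neutron_auth_errors(is_admin_client=True)
def list_ports(**search_opts):
    raise NeutronClientUnauthorized("401")  # simulate the failing GET /v2.0/ports

try:
    list_ports(device_id="6ab197e9-3e38-4b37-b625-c30b6977261a")
except NeutronAdminCredentialConfigurationInvalid as exc:
    print(type(exc).__name__)  # the state the terminate path above ends in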
[ 1362.386436] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-0b3639e2-e033-47ea-8483-3cbcdc0292fb tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Expecting reply to msg 07e0687a8725423db615e5796d17579d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1362.421272] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 07e0687a8725423db615e5796d17579d [ 1362.422997] env[61649]: ERROR nova.compute.manager [None req-0b3639e2-e033-47ea-8483-3cbcdc0292fb tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1362.422997] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Traceback (most recent call last): [ 1362.422997] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1362.422997] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] ret = obj(*args, **kwargs) [ 1362.422997] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1362.422997] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] exception_handler_v20(status_code, error_body) [ 1362.422997] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1362.422997] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] raise client_exc(message=error_message, [ 1362.422997] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1362.422997] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Neutron server returns request_ids: ['req-056acf41-4174-4941-9c7b-ea463c6d0c3b'] [ 1362.422997] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] [ 1362.423302] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] During handling of the above exception, another exception occurred: [ 1362.423302] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] [ 1362.423302] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Traceback (most recent call last): [ 1362.423302] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 1362.423302] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] self._delete_instance(context, instance, bdms) [ 1362.423302] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1362.423302] env[61649]: ERROR 
nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] self._shutdown_instance(context, instance, bdms) [ 1362.423302] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1362.423302] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] self._try_deallocate_network(context, instance, requested_networks) [ 1362.423302] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1362.423302] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] with excutils.save_and_reraise_exception(): [ 1362.423302] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1362.423302] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] self.force_reraise() [ 1362.423610] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1362.423610] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] raise self.value [ 1362.423610] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1362.423610] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] _deallocate_network_with_retries() [ 1362.423610] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1362.423610] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] return evt.wait() [ 1362.423610] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1362.423610] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] result = hub.switch() [ 1362.423610] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1362.423610] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] return self.greenlet.switch() [ 1362.423610] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1362.423610] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] result = func(*self.args, **self.kw) [ 1362.424045] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1362.424045] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] result = f(*args, **kwargs) [ 1362.424045] env[61649]: ERROR nova.compute.manager [instance: 
6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1362.424045] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] self._deallocate_network( [ 1362.424045] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1362.424045] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] self.network_api.deallocate_for_instance( [ 1362.424045] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/network/neutron.py", line 1805, in deallocate_for_instance [ 1362.424045] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] data = neutron.list_ports(**search_opts) [ 1362.424045] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1362.424045] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] ret = obj(*args, **kwargs) [ 1362.424045] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1362.424045] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] return self.list('ports', self.ports_path, retrieve_all, [ 1362.424045] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1362.424357] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] ret = obj(*args, **kwargs) [ 1362.424357] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1362.424357] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] for r in self._pagination(collection, path, **params): [ 1362.424357] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1362.424357] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] res = self.get(path, params=params) [ 1362.424357] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1362.424357] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] ret = obj(*args, **kwargs) [ 1362.424357] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1362.424357] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] return self.retry_request("GET", action, body=body, [ 1362.424357] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1362.424357] env[61649]: ERROR nova.compute.manager [instance: 
6ab197e9-3e38-4b37-b625-c30b6977261a] ret = obj(*args, **kwargs) [ 1362.424357] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1362.424357] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] return self.do_request(method, action, body=body, [ 1362.424657] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1362.424657] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] ret = obj(*args, **kwargs) [ 1362.424657] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1362.424657] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] self._handle_fault_response(status_code, replybody, resp) [ 1362.424657] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1362.424657] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1362.424657] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1362.424657] env[61649]: ERROR nova.compute.manager [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] [ 1362.425616] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-0b3639e2-e033-47ea-8483-3cbcdc0292fb tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Expecting reply to msg 48660bba99c945acb46c02d245c045f6 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1362.432078] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6afa138-ad6b-4773-9a85-27080052a2b8 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.439522] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd7e85bc-eb35-4cec-98b8-8fb9b86ea407 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.470345] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 48660bba99c945acb46c02d245c045f6 [ 1362.471668] env[61649]: DEBUG oslo_concurrency.lockutils [None req-0b3639e2-e033-47ea-8483-3cbcdc0292fb tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Lock "6ab197e9-3e38-4b37-b625-c30b6977261a" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.277s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1362.472564] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-0b3639e2-e033-47ea-8483-3cbcdc0292fb tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Expecting reply to msg 3b034d46365345a7bc4842a874d44591 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1362.473995]
env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ff1c6ac-4289-473c-b93a-63108e564ba3 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.476704] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "6ab197e9-3e38-4b37-b625-c30b6977261a" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 234.449s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1362.476887] env[61649]: INFO nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] During sync_power_state the instance has a pending task (deleting). Skip. [ 1362.477057] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "6ab197e9-3e38-4b37-b625-c30b6977261a" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1362.479159] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3b034d46365345a7bc4842a874d44591 [ 1362.480075] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-0b3639e2-e033-47ea-8483-3cbcdc0292fb tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Expecting reply to msg 9dd6cc5e07804e4c9f9dbf0033f77776 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1362.487701] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-839dd4c1-26c4-4453-8ec5-0e99389dfc8f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.492726] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9dd6cc5e07804e4c9f9dbf0033f77776 [ 1362.494543] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-0b3639e2-e033-47ea-8483-3cbcdc0292fb tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Expecting reply to msg 30a5b7a4cb88460d9f07721a4228ec68 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1362.508393] env[61649]: DEBUG nova.compute.provider_tree [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1362.508883] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Expecting reply to msg 672fcfd1209f4a39a3084610d2aa38ab in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1362.516434] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 672fcfd1209f4a39a3084610d2aa38ab [ 1362.517306] env[61649]: DEBUG nova.scheduler.client.report [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Inventory has not changed for provider
dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1362.519581] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Expecting reply to msg 99c2f2681ffd4371b262e9b556a32982 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1362.520693] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 30a5b7a4cb88460d9f07721a4228ec68 [ 1362.521592] env[61649]: INFO nova.compute.manager [None req-0b3639e2-e033-47ea-8483-3cbcdc0292fb tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] [instance: 6ab197e9-3e38-4b37-b625-c30b6977261a] Successfully reverted task state from None on failure for instance. [ 1362.525069] env[61649]: ERROR oslo_messaging.rpc.server [None req-0b3639e2-e033-47ea-8483-3cbcdc0292fb tempest-MigrationsAdminTest-1299306010 tempest-MigrationsAdminTest-1299306010-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1362.525069] env[61649]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1362.525069] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1362.525069] env[61649]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1362.525069] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1362.525069] env[61649]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1362.525069] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1362.525069] env[61649]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1362.525069] env[61649]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1362.525069] env[61649]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-056acf41-4174-4941-9c7b-ea463c6d0c3b'] [ 1362.525069] env[61649]: ERROR oslo_messaging.rpc.server [ 1362.525069] env[61649]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1362.525069] env[61649]: ERROR oslo_messaging.rpc.server [ 1362.525069] env[61649]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1362.525069] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1362.525069] env[61649]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1362.525602] env[61649]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1362.525602] env[61649]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1362.525602] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1362.525602] env[61649]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1362.525602] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1362.525602] env[61649]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1362.525602] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1362.525602] env[61649]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1362.525602] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1362.525602] env[61649]: ERROR oslo_messaging.rpc.server raise self.value [ 1362.525602] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1362.525602] env[61649]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1362.525602] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1362.525602] env[61649]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1362.525602] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1362.525602] env[61649]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1362.525602] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1362.525602] env[61649]: ERROR oslo_messaging.rpc.server raise self.value [ 1362.526097] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1362.526097] env[61649]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1362.526097] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1362.526097] env[61649]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1362.526097] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1362.526097] env[61649]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1362.526097] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1362.526097] env[61649]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1362.526097] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1362.526097] env[61649]: ERROR oslo_messaging.rpc.server raise self.value [ 1362.526097] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1362.526097] env[61649]: ERROR 
oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1362.526097] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3344, in terminate_instance [ 1362.526097] env[61649]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1362.526097] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1362.526097] env[61649]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1362.526097] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3339, in do_terminate_instance [ 1362.526097] env[61649]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1362.526701] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1362.526701] env[61649]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1362.526701] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1362.526701] env[61649]: ERROR oslo_messaging.rpc.server raise self.value [ 1362.526701] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 1362.526701] env[61649]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1362.526701] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1362.526701] env[61649]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1362.526701] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1362.526701] env[61649]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1362.526701] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1362.526701] env[61649]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1362.526701] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1362.526701] env[61649]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1362.526701] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1362.526701] env[61649]: ERROR oslo_messaging.rpc.server raise self.value [ 1362.526701] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1362.526701] env[61649]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1362.527184] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1362.527184] env[61649]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1362.527184] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1362.527184] env[61649]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1362.527184] env[61649]: ERROR 
oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1362.527184] env[61649]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1362.527184] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1362.527184] env[61649]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1362.527184] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1362.527184] env[61649]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1362.527184] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1362.527184] env[61649]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1362.527184] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1362.527184] env[61649]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1362.527184] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1805, in deallocate_for_instance [ 1362.527184] env[61649]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1362.527184] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1362.527184] env[61649]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1362.527695] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1362.527695] env[61649]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1362.527695] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1362.527695] env[61649]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1362.527695] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1362.527695] env[61649]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1362.527695] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1362.527695] env[61649]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1362.527695] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1362.527695] env[61649]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1362.527695] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1362.527695] env[61649]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1362.527695] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1362.527695] env[61649]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1362.527695] env[61649]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1362.527695] env[61649]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1362.527695] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1362.527695] env[61649]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1362.528206] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1362.528206] env[61649]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1362.528206] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1362.528206] env[61649]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1362.528206] env[61649]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1362.528206] env[61649]: ERROR oslo_messaging.rpc.server [ 1362.532786] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 99c2f2681ffd4371b262e9b556a32982 [ 1362.533469] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.347s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1362.533954] env[61649]: DEBUG nova.compute.manager [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Start building networks asynchronously for instance. 
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1362.535451] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Expecting reply to msg d2c1ad5809f04e64b836432ebe435cac in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1362.536298] env[61649]: DEBUG oslo_concurrency.lockutils [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.260s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1362.538276] env[61649]: INFO nova.compute.claims [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] [instance: b6243867-9546-4663-9d48-5c040537490b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1362.540035] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Expecting reply to msg 39d103f09ff742708af5bf632c5f6243 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1362.564635] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d2c1ad5809f04e64b836432ebe435cac [ 1362.566011] env[61649]: DEBUG nova.compute.utils [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Using /dev/sd instead of None {{(pid=61649) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1362.566701] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Expecting reply to msg ed86ae5da41c4c629c86a42cf868b852 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1362.567682] env[61649]: DEBUG nova.compute.manager [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Allocating IP information in the background. 
{{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1362.567751] env[61649]: DEBUG nova.network.neutron [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] allocate_for_instance() {{(pid=61649) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1362.570661] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 39d103f09ff742708af5bf632c5f6243 [ 1362.572446] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Expecting reply to msg a8804fd3dc5f4f37bfaf4bf291576f3e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1362.578167] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ed86ae5da41c4c629c86a42cf868b852 [ 1362.578967] env[61649]: DEBUG nova.compute.manager [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Start building block device mappings for instance. {{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1362.580917] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Expecting reply to msg 7549bb4548204a76a0af5d18cae025d0 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1362.581969] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a8804fd3dc5f4f37bfaf4bf291576f3e [ 1362.614632] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7549bb4548204a76a0af5d18cae025d0 [ 1362.617466] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Expecting reply to msg cabb24956f954501ad3bc879ef0662ab in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1362.624235] env[61649]: DEBUG nova.policy [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6e1c68c4f2ce4433aa498ec2dca7f467', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd465a44feb9f4640b992dac10cd059e2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61649) authorize /opt/stack/nova/nova/policy.py:203}} [ 1362.648740] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cabb24956f954501ad3bc879ef0662ab [ 1362.650178] env[61649]: DEBUG nova.compute.manager [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Start spawning the instance on the hypervisor. 
{{(pid=61649) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1362.672447] env[61649]: DEBUG nova.virt.hardware [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-04-02T10:03:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-04-02T10:03:42Z,direct_url=,disk_format='vmdk',id=d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d9c1bd4c77004c3cb8e42232cad1896c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-04-02T10:03:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1362.672447] env[61649]: DEBUG nova.virt.hardware [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Flavor limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1362.672447] env[61649]: DEBUG nova.virt.hardware [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Image limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1362.672716] env[61649]: DEBUG nova.virt.hardware [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Flavor pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1362.672716] env[61649]: DEBUG nova.virt.hardware [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Image pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1362.672716] env[61649]: DEBUG nova.virt.hardware [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1362.672716] env[61649]: DEBUG nova.virt.hardware [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1362.672716] env[61649]: DEBUG nova.virt.hardware [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1362.672907] env[61649]: DEBUG 
nova.virt.hardware [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Got 1 possible topologies {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1362.672907] env[61649]: DEBUG nova.virt.hardware [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1362.672907] env[61649]: DEBUG nova.virt.hardware [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1362.673756] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ba2aeef-440e-4ffe-8925-b1a49bfa00b8 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.684614] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d72c6fd-5ff6-4272-800a-294a4a54ebb1 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.703170] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquiring lock "5f424618-f9b3-4e9a-898c-2d1a07476cc7" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1362.703398] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "5f424618-f9b3-4e9a-898c-2d1a07476cc7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1362.775209] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14ccbca3-7b02-4633-8bfc-eee2b8d97b65 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.782304] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f282f7ce-ea2b-4ba8-ab99-90c6f23c3478 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.811396] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55da33c1-b401-4362-94d0-c802c57d2a16 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.818081] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9276e815-36c6-4b06-9917-709ac1be12c8 {{(pid=61649) request_handler
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.830717] env[61649]: DEBUG nova.compute.provider_tree [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1362.831207] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Expecting reply to msg 75864d567844449cbc6bf5da9403fca2 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1362.838755] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 75864d567844449cbc6bf5da9403fca2 [ 1362.839622] env[61649]: DEBUG nova.scheduler.client.report [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1362.841891] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Expecting reply to msg 3f041ac1b258413db1fcfb4a43081e24 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1362.858785] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3f041ac1b258413db1fcfb4a43081e24 [ 1362.866455] env[61649]: DEBUG oslo_concurrency.lockutils [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.320s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1362.867356] env[61649]: DEBUG nova.compute.manager [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] [instance: b6243867-9546-4663-9d48-5c040537490b] Start building networks asynchronously for instance. 
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1362.873983] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Expecting reply to msg 8f5667d53b394ab7a4a16c6db17edc05 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1362.917082] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8f5667d53b394ab7a4a16c6db17edc05 [ 1362.918462] env[61649]: DEBUG nova.compute.utils [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Using /dev/sd instead of None {{(pid=61649) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1362.919060] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Expecting reply to msg 4e250e6a9f414a35a2cdff8116ed0f93 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1362.919991] env[61649]: DEBUG nova.compute.manager [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] [instance: b6243867-9546-4663-9d48-5c040537490b] Allocating IP information in the background. {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1362.920228] env[61649]: DEBUG nova.network.neutron [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] [instance: b6243867-9546-4663-9d48-5c040537490b] allocate_for_instance() {{(pid=61649) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1362.937852] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4e250e6a9f414a35a2cdff8116ed0f93 [ 1362.939415] env[61649]: DEBUG nova.compute.manager [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] [instance: b6243867-9546-4663-9d48-5c040537490b] Start building block device mappings for instance. 
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1362.945195] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Expecting reply to msg ac77a00559654766a2033d0ea375cf81 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1362.978892] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ac77a00559654766a2033d0ea375cf81 [ 1362.978892] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Expecting reply to msg f86bb395bd9f48fcb619ab017f4aaff5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1363.002245] env[61649]: DEBUG nova.policy [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ce5ba30ab10a46c393d3b3f3f2786753', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4bc9f358fd6846ab889963f86dd64157', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61649) authorize /opt/stack/nova/nova/policy.py:203}} [ 1363.021797] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f86bb395bd9f48fcb619ab017f4aaff5 [ 1363.023527] env[61649]: DEBUG nova.compute.manager [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] [instance: b6243867-9546-4663-9d48-5c040537490b] Start spawning the instance on the hypervisor. 
{{(pid=61649) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1363.045316] env[61649]: DEBUG nova.virt.hardware [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-04-02T10:03:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-04-02T10:03:42Z,direct_url=,disk_format='vmdk',id=d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d9c1bd4c77004c3cb8e42232cad1896c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-04-02T10:03:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1363.045760] env[61649]: DEBUG nova.virt.hardware [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Flavor limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1363.046035] env[61649]: DEBUG nova.virt.hardware [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Image limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1363.046335] env[61649]: DEBUG nova.virt.hardware [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Flavor pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1363.046586] env[61649]: DEBUG nova.virt.hardware [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Image pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1363.046832] env[61649]: DEBUG nova.virt.hardware [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1363.047137] env[61649]: DEBUG nova.virt.hardware [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1363.047437] env[61649]: DEBUG nova.virt.hardware [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1363.047731] env[61649]: DEBUG nova.virt.hardware [None 
req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Got 1 possible topologies {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1363.047998] env[61649]: DEBUG nova.virt.hardware [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1363.048308] env[61649]: DEBUG nova.virt.hardware [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1363.049248] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f92fc55d-b09a-4577-af28-d9dcd1cf04fd {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.058157] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b50a958-d649-4f75-ac41-2f756f4b573f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.126819] env[61649]: DEBUG nova.network.neutron [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Successfully created port: fa0ba8f2-dd82-4088-819f-a3067a905775 {{(pid=61649) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1363.425624] env[61649]: DEBUG nova.network.neutron [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] [instance: b6243867-9546-4663-9d48-5c040537490b] Successfully created port: ecd17dd0-ce77-4f0e-b3da-ab33b857212a {{(pid=61649) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1363.739224] env[61649]: DEBUG nova.network.neutron [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Successfully updated port: fa0ba8f2-dd82-4088-819f-a3067a905775 {{(pid=61649) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1363.739224] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Expecting reply to msg 07ed0814067946729fcb90676382f0e8 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1363.745188] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 07ed0814067946729fcb90676382f0e8 [ 1363.745188] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Acquiring lock "refresh_cache-d6e8f17f-40c4-46e0-a900-d92d1da01ed8" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1363.745188] env[61649]: DEBUG 
oslo_concurrency.lockutils [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Acquired lock "refresh_cache-d6e8f17f-40c4-46e0-a900-d92d1da01ed8" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1363.745188] env[61649]: DEBUG nova.network.neutron [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Building network info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1363.745188] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Expecting reply to msg 5493ce07b14947399e82485713d0919b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1363.757373] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5493ce07b14947399e82485713d0919b [ 1363.782980] env[61649]: DEBUG nova.network.neutron [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Instance cache missing network info. {{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1363.933090] env[61649]: DEBUG nova.network.neutron [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Updating instance_info_cache with network_info: [{"id": "fa0ba8f2-dd82-4088-819f-a3067a905775", "address": "fa:16:3e:da:d0:8e", "network": {"id": "dee0efa5-059d-4e15-8109-2789ac63f99b", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1472235714-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d465a44feb9f4640b992dac10cd059e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc6d5964-1106-4345-a26d-185dabd4ff0f", "external-id": "nsx-vlan-transportzone-603", "segmentation_id": 603, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa0ba8f2-dd", "ovs_interfaceid": "fa0ba8f2-dd82-4088-819f-a3067a905775", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1363.933604] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Expecting reply to msg 49d3911162fd4c72abe311b26404d52f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1363.946933] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
49d3911162fd4c72abe311b26404d52f [ 1363.947589] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Releasing lock "refresh_cache-d6e8f17f-40c4-46e0-a900-d92d1da01ed8" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1363.947893] env[61649]: DEBUG nova.compute.manager [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Instance network_info: |[{"id": "fa0ba8f2-dd82-4088-819f-a3067a905775", "address": "fa:16:3e:da:d0:8e", "network": {"id": "dee0efa5-059d-4e15-8109-2789ac63f99b", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1472235714-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d465a44feb9f4640b992dac10cd059e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc6d5964-1106-4345-a26d-185dabd4ff0f", "external-id": "nsx-vlan-transportzone-603", "segmentation_id": 603, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa0ba8f2-dd", "ovs_interfaceid": "fa0ba8f2-dd82-4088-819f-a3067a905775", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1363.948297] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:da:d0:8e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dc6d5964-1106-4345-a26d-185dabd4ff0f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fa0ba8f2-dd82-4088-819f-a3067a905775', 'vif_model': 'vmxnet3'}] {{(pid=61649) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1363.956222] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Creating folder: Project (d465a44feb9f4640b992dac10cd059e2). Parent ref: group-v51588. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1363.956707] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e5b3be97-8a6c-481c-9dae-9b2265a93b2d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.967722] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Created folder: Project (d465a44feb9f4640b992dac10cd059e2) in parent group-v51588. 
[ 1363.967904] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Creating folder: Instances. Parent ref: group-v51671. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1363.968141] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ff1f5666-2d70-4671-9769-a5843b1642ea {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.976820] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Created folder: Instances in parent group-v51671. [ 1363.977043] env[61649]: DEBUG oslo.service.loopingcall [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1363.977221] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Creating VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1363.977414] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2fbae8ab-5872-41cf-b1b5-56f513b85663 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.997398] env[61649]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1363.997398] env[61649]: value = "task-158232" [ 1363.997398] env[61649]: _type = "Task" [ 1363.997398] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1364.006794] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158232, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1364.012220] env[61649]: DEBUG nova.compute.manager [req-d1071ebd-1a9e-4714-b0d2-ea012235c99f req-d72a9d7f-a88c-4c1d-a3a9-32eb1c8d1020 service nova] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Received event network-vif-plugged-fa0ba8f2-dd82-4088-819f-a3067a905775 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1364.012489] env[61649]: DEBUG oslo_concurrency.lockutils [req-d1071ebd-1a9e-4714-b0d2-ea012235c99f req-d72a9d7f-a88c-4c1d-a3a9-32eb1c8d1020 service nova] Acquiring lock "d6e8f17f-40c4-46e0-a900-d92d1da01ed8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1364.012708] env[61649]: DEBUG oslo_concurrency.lockutils [req-d1071ebd-1a9e-4714-b0d2-ea012235c99f req-d72a9d7f-a88c-4c1d-a3a9-32eb1c8d1020 service nova] Lock "d6e8f17f-40c4-46e0-a900-d92d1da01ed8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1364.012999] env[61649]: DEBUG oslo_concurrency.lockutils [req-d1071ebd-1a9e-4714-b0d2-ea012235c99f req-d72a9d7f-a88c-4c1d-a3a9-32eb1c8d1020 service nova] Lock "d6e8f17f-40c4-46e0-a900-d92d1da01ed8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1364.013173] env[61649]: DEBUG nova.compute.manager [req-d1071ebd-1a9e-4714-b0d2-ea012235c99f req-d72a9d7f-a88c-4c1d-a3a9-32eb1c8d1020 service nova] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] No waiting events found dispatching network-vif-plugged-fa0ba8f2-dd82-4088-819f-a3067a905775 {{(pid=61649) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1364.013333] env[61649]: WARNING nova.compute.manager [req-d1071ebd-1a9e-4714-b0d2-ea012235c99f req-d72a9d7f-a88c-4c1d-a3a9-32eb1c8d1020 service nova] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Received unexpected event network-vif-plugged-fa0ba8f2-dd82-4088-819f-a3067a905775 for instance with vm_state building and task_state spawning. [ 1364.013507] env[61649]: DEBUG nova.compute.manager [req-d1071ebd-1a9e-4714-b0d2-ea012235c99f req-d72a9d7f-a88c-4c1d-a3a9-32eb1c8d1020 service nova] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Received event network-changed-fa0ba8f2-dd82-4088-819f-a3067a905775 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1364.013657] env[61649]: DEBUG nova.compute.manager [req-d1071ebd-1a9e-4714-b0d2-ea012235c99f req-d72a9d7f-a88c-4c1d-a3a9-32eb1c8d1020 service nova] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Refreshing instance network info cache due to event network-changed-fa0ba8f2-dd82-4088-819f-a3067a905775.
{{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 1364.013837] env[61649]: DEBUG oslo_concurrency.lockutils [req-d1071ebd-1a9e-4714-b0d2-ea012235c99f req-d72a9d7f-a88c-4c1d-a3a9-32eb1c8d1020 service nova] Acquiring lock "refresh_cache-d6e8f17f-40c4-46e0-a900-d92d1da01ed8" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1364.013968] env[61649]: DEBUG oslo_concurrency.lockutils [req-d1071ebd-1a9e-4714-b0d2-ea012235c99f req-d72a9d7f-a88c-4c1d-a3a9-32eb1c8d1020 service nova] Acquired lock "refresh_cache-d6e8f17f-40c4-46e0-a900-d92d1da01ed8" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1364.014120] env[61649]: DEBUG nova.network.neutron [req-d1071ebd-1a9e-4714-b0d2-ea012235c99f req-d72a9d7f-a88c-4c1d-a3a9-32eb1c8d1020 service nova] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Refreshing network info cache for port fa0ba8f2-dd82-4088-819f-a3067a905775 {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 1364.014574] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-d1071ebd-1a9e-4714-b0d2-ea012235c99f req-d72a9d7f-a88c-4c1d-a3a9-32eb1c8d1020 service nova] Expecting reply to msg 81e34e70ebb84591864bbe28e365ba3a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1364.021836] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 81e34e70ebb84591864bbe28e365ba3a [ 1364.203285] env[61649]: DEBUG nova.network.neutron [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] [instance: b6243867-9546-4663-9d48-5c040537490b] Successfully updated port: ecd17dd0-ce77-4f0e-b3da-ab33b857212a {{(pid=61649) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1364.203285] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Expecting reply to msg 384f250da695443194c064ec97861fcc in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1364.206146] env[61649]: DEBUG nova.compute.manager [req-d3dc18fd-7894-4b64-8ad6-1905f7b4d28d req-ea4a1111-17fa-4edb-a3fd-45e65585db65 service nova] [instance: b6243867-9546-4663-9d48-5c040537490b] Received event network-vif-plugged-ecd17dd0-ce77-4f0e-b3da-ab33b857212a {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1364.206146] env[61649]: DEBUG oslo_concurrency.lockutils [req-d3dc18fd-7894-4b64-8ad6-1905f7b4d28d req-ea4a1111-17fa-4edb-a3fd-45e65585db65 service nova] Acquiring lock "b6243867-9546-4663-9d48-5c040537490b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1364.206146] env[61649]: DEBUG oslo_concurrency.lockutils [req-d3dc18fd-7894-4b64-8ad6-1905f7b4d28d req-ea4a1111-17fa-4edb-a3fd-45e65585db65 service nova] Lock "b6243867-9546-4663-9d48-5c040537490b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1364.206146] env[61649]: DEBUG oslo_concurrency.lockutils [req-d3dc18fd-7894-4b64-8ad6-1905f7b4d28d req-ea4a1111-17fa-4edb-a3fd-45e65585db65 service nova] Lock
"b6243867-9546-4663-9d48-5c040537490b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1364.206387] env[61649]: DEBUG nova.compute.manager [req-d3dc18fd-7894-4b64-8ad6-1905f7b4d28d req-ea4a1111-17fa-4edb-a3fd-45e65585db65 service nova] [instance: b6243867-9546-4663-9d48-5c040537490b] No waiting events found dispatching network-vif-plugged-ecd17dd0-ce77-4f0e-b3da-ab33b857212a {{(pid=61649) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1364.206387] env[61649]: WARNING nova.compute.manager [req-d3dc18fd-7894-4b64-8ad6-1905f7b4d28d req-ea4a1111-17fa-4edb-a3fd-45e65585db65 service nova] [instance: b6243867-9546-4663-9d48-5c040537490b] Received unexpected event network-vif-plugged-ecd17dd0-ce77-4f0e-b3da-ab33b857212a for instance with vm_state building and task_state spawning. [ 1364.212472] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 384f250da695443194c064ec97861fcc [ 1364.212472] env[61649]: DEBUG oslo_concurrency.lockutils [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Acquiring lock "refresh_cache-b6243867-9546-4663-9d48-5c040537490b" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1364.212472] env[61649]: DEBUG oslo_concurrency.lockutils [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Acquired lock "refresh_cache-b6243867-9546-4663-9d48-5c040537490b" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1364.212472] env[61649]: DEBUG nova.network.neutron [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] [instance: b6243867-9546-4663-9d48-5c040537490b] Building network info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1364.212472] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Expecting reply to msg 0bb9a8c2eccb487387d4e6b92bd3a70a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1364.221090] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0bb9a8c2eccb487387d4e6b92bd3a70a [ 1364.281596] env[61649]: DEBUG nova.network.neutron [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] [instance: b6243867-9546-4663-9d48-5c040537490b] Instance cache missing network info. {{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1364.347032] env[61649]: DEBUG nova.network.neutron [req-d1071ebd-1a9e-4714-b0d2-ea012235c99f req-d72a9d7f-a88c-4c1d-a3a9-32eb1c8d1020 service nova] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Updated VIF entry in instance network info cache for port fa0ba8f2-dd82-4088-819f-a3067a905775. 
{{(pid=61649) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 1364.347381] env[61649]: DEBUG nova.network.neutron [req-d1071ebd-1a9e-4714-b0d2-ea012235c99f req-d72a9d7f-a88c-4c1d-a3a9-32eb1c8d1020 service nova] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Updating instance_info_cache with network_info: [{"id": "fa0ba8f2-dd82-4088-819f-a3067a905775", "address": "fa:16:3e:da:d0:8e", "network": {"id": "dee0efa5-059d-4e15-8109-2789ac63f99b", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1472235714-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d465a44feb9f4640b992dac10cd059e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc6d5964-1106-4345-a26d-185dabd4ff0f", "external-id": "nsx-vlan-transportzone-603", "segmentation_id": 603, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa0ba8f2-dd", "ovs_interfaceid": "fa0ba8f2-dd82-4088-819f-a3067a905775", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1364.347980] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-d1071ebd-1a9e-4714-b0d2-ea012235c99f req-d72a9d7f-a88c-4c1d-a3a9-32eb1c8d1020 service nova] Expecting reply to msg 1b2e4a3377de4877a64e4bdbab2054dd in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1364.357848] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1b2e4a3377de4877a64e4bdbab2054dd [ 1364.358643] env[61649]: DEBUG oslo_concurrency.lockutils [req-d1071ebd-1a9e-4714-b0d2-ea012235c99f req-d72a9d7f-a88c-4c1d-a3a9-32eb1c8d1020 service nova] Releasing lock "refresh_cache-d6e8f17f-40c4-46e0-a900-d92d1da01ed8" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1364.428839] env[61649]: DEBUG nova.network.neutron [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] [instance: b6243867-9546-4663-9d48-5c040537490b] Updating instance_info_cache with network_info: [{"id": "ecd17dd0-ce77-4f0e-b3da-ab33b857212a", "address": "fa:16:3e:d6:f7:be", "network": {"id": "4a3af343-aefd-4beb-b0af-8851348e050d", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1038524057-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bc9f358fd6846ab889963f86dd64157", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": 
{"0": "nsxv3"}}, "devname": "tapecd17dd0-ce", "ovs_interfaceid": "ecd17dd0-ce77-4f0e-b3da-ab33b857212a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1364.429375] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Expecting reply to msg 9c8d52230d0c4471a5ad8d2ecae93bd1 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1364.438768] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9c8d52230d0c4471a5ad8d2ecae93bd1 [ 1364.439304] env[61649]: DEBUG oslo_concurrency.lockutils [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Releasing lock "refresh_cache-b6243867-9546-4663-9d48-5c040537490b" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1364.439646] env[61649]: DEBUG nova.compute.manager [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] [instance: b6243867-9546-4663-9d48-5c040537490b] Instance network_info: |[{"id": "ecd17dd0-ce77-4f0e-b3da-ab33b857212a", "address": "fa:16:3e:d6:f7:be", "network": {"id": "4a3af343-aefd-4beb-b0af-8851348e050d", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1038524057-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bc9f358fd6846ab889963f86dd64157", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapecd17dd0-ce", "ovs_interfaceid": "ecd17dd0-ce77-4f0e-b3da-ab33b857212a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1364.440049] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] [instance: b6243867-9546-4663-9d48-5c040537490b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d6:f7:be', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '08fb4857-7f9b-4f97-86ef-415341fb595d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ecd17dd0-ce77-4f0e-b3da-ab33b857212a', 'vif_model': 'vmxnet3'}] {{(pid=61649) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1364.447359] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 
tempest-AttachVolumeTestJSON-1656492236-project-member] Creating folder: Project (4bc9f358fd6846ab889963f86dd64157). Parent ref: group-v51588. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1364.447827] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2d4c95f7-811c-4f0f-8f64-70d7f3c6228c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.458428] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Created folder: Project (4bc9f358fd6846ab889963f86dd64157) in parent group-v51588. [ 1364.458580] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Creating folder: Instances. Parent ref: group-v51674. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1364.458882] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-38a15c53-ff12-4daf-b1c1-5d1747212d37 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.466609] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Created folder: Instances in parent group-v51674. [ 1364.466833] env[61649]: DEBUG oslo.service.loopingcall [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1364.467008] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b6243867-9546-4663-9d48-5c040537490b] Creating VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1364.467487] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-61d4d3e5-1653-472a-b4fa-016bf4dfc857 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.485553] env[61649]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1364.485553] env[61649]: value = "task-158235" [ 1364.485553] env[61649]: _type = "Task" [ 1364.485553] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1364.495216] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158235, 'name': CreateVM_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1364.506067] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158232, 'name': CreateVM_Task, 'duration_secs': 0.284521} completed successfully. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1364.506218] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Created VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1364.506828] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1364.506992] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1364.507301] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1364.507523] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b83e0a0-2abb-4cfd-bb8f-b20f72b77232 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.512410] env[61649]: DEBUG oslo_vmware.api [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Waiting for the task: (returnval){ [ 1364.512410] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52e7a81e-81c4-6f93-a00f-e192e60815bb" [ 1364.512410] env[61649]: _type = "Task" [ 1364.512410] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1364.520429] env[61649]: DEBUG oslo_vmware.api [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52e7a81e-81c4-6f93-a00f-e192e60815bb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1364.995472] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158235, 'name': CreateVM_Task, 'duration_secs': 0.300502} completed successfully. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1364.995742] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b6243867-9546-4663-9d48-5c040537490b] Created VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1364.996561] env[61649]: DEBUG oslo_concurrency.lockutils [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1365.024169] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1365.024512] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Processing image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1365.024806] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1365.025096] env[61649]: DEBUG oslo_concurrency.lockutils [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1365.025570] env[61649]: DEBUG oslo_concurrency.lockutils [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1365.025892] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b377334d-33c5-4259-9ba4-f14871856f89 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.031357] env[61649]: DEBUG oslo_vmware.api [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Waiting for the task: (returnval){ [ 1365.031357] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52ea8a36-3412-a4d5-e2c9-fcafe9171b79" [ 1365.031357] env[61649]: _type = "Task" [ 1365.031357] env[61649]: } to complete. 
{{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1365.041802] env[61649]: DEBUG oslo_vmware.api [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52ea8a36-3412-a4d5-e2c9-fcafe9171b79, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.541379] env[61649]: DEBUG oslo_concurrency.lockutils [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1365.541691] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] [instance: b6243867-9546-4663-9d48-5c040537490b] Processing image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1365.542031] env[61649]: DEBUG oslo_concurrency.lockutils [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1366.238983] env[61649]: DEBUG nova.compute.manager [req-0369daeb-9327-449d-a012-c328952ab4d4 req-6931b526-bb20-4982-943a-a6aaa67a012d service nova] [instance: b6243867-9546-4663-9d48-5c040537490b] Received event network-changed-ecd17dd0-ce77-4f0e-b3da-ab33b857212a {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1366.239183] env[61649]: DEBUG nova.compute.manager [req-0369daeb-9327-449d-a012-c328952ab4d4 req-6931b526-bb20-4982-943a-a6aaa67a012d service nova] [instance: b6243867-9546-4663-9d48-5c040537490b] Refreshing instance network info cache due to event network-changed-ecd17dd0-ce77-4f0e-b3da-ab33b857212a. 
{{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 1366.239414] env[61649]: DEBUG oslo_concurrency.lockutils [req-0369daeb-9327-449d-a012-c328952ab4d4 req-6931b526-bb20-4982-943a-a6aaa67a012d service nova] Acquiring lock "refresh_cache-b6243867-9546-4663-9d48-5c040537490b" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1366.239564] env[61649]: DEBUG oslo_concurrency.lockutils [req-0369daeb-9327-449d-a012-c328952ab4d4 req-6931b526-bb20-4982-943a-a6aaa67a012d service nova] Acquired lock "refresh_cache-b6243867-9546-4663-9d48-5c040537490b" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1366.239735] env[61649]: DEBUG nova.network.neutron [req-0369daeb-9327-449d-a012-c328952ab4d4 req-6931b526-bb20-4982-943a-a6aaa67a012d service nova] [instance: b6243867-9546-4663-9d48-5c040537490b] Refreshing network info cache for port ecd17dd0-ce77-4f0e-b3da-ab33b857212a {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 1366.240232] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-0369daeb-9327-449d-a012-c328952ab4d4 req-6931b526-bb20-4982-943a-a6aaa67a012d service nova] Expecting reply to msg a92abd8afa7e40e6bb0388b9fe4dd371 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1366.248474] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a92abd8afa7e40e6bb0388b9fe4dd371 [ 1366.524618] env[61649]: DEBUG nova.network.neutron [req-0369daeb-9327-449d-a012-c328952ab4d4 req-6931b526-bb20-4982-943a-a6aaa67a012d service nova] [instance: b6243867-9546-4663-9d48-5c040537490b] Updated VIF entry in instance network info cache for port ecd17dd0-ce77-4f0e-b3da-ab33b857212a. 
{{(pid=61649) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 1366.524977] env[61649]: DEBUG nova.network.neutron [req-0369daeb-9327-449d-a012-c328952ab4d4 req-6931b526-bb20-4982-943a-a6aaa67a012d service nova] [instance: b6243867-9546-4663-9d48-5c040537490b] Updating instance_info_cache with network_info: [{"id": "ecd17dd0-ce77-4f0e-b3da-ab33b857212a", "address": "fa:16:3e:d6:f7:be", "network": {"id": "4a3af343-aefd-4beb-b0af-8851348e050d", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1038524057-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bc9f358fd6846ab889963f86dd64157", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapecd17dd0-ce", "ovs_interfaceid": "ecd17dd0-ce77-4f0e-b3da-ab33b857212a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1366.525484] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-0369daeb-9327-449d-a012-c328952ab4d4 req-6931b526-bb20-4982-943a-a6aaa67a012d service nova] Expecting reply to msg eaa19cdb0af5463fb3f5bb300e7a9144 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1366.533928] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eaa19cdb0af5463fb3f5bb300e7a9144 [ 1366.534512] env[61649]: DEBUG oslo_concurrency.lockutils [req-0369daeb-9327-449d-a012-c328952ab4d4 req-6931b526-bb20-4982-943a-a6aaa67a012d service nova] Releasing lock "refresh_cache-b6243867-9546-4663-9d48-5c040537490b" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1370.371798] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1370.981833] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 558fcc53541a42c9a91ae6266b164955 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1370.991055] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 558fcc53541a42c9a91ae6266b164955 [ 1372.928546] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1372.928860] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 
1372.928935] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61649) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 1373.924865] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1374.928542] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1375.930025] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1375.930291] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Starting heal instance info cache {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 1375.930340] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Rebuilding the list of instances to heal {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 1375.931014] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 4065f9a494db4f049f62049c3c856028 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1375.950249] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4065f9a494db4f049f62049c3c856028 [ 1375.952671] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1375.952822] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1375.952955] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1375.953083] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1375.953207] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Skipping network cache update for instance because it is Building. 
{{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1375.953326] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1375.953544] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1375.953689] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1375.953813] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1375.953932] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: b6243867-9546-4663-9d48-5c040537490b] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1375.954078] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Didn't find any instances for network info cache update. 
{{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 1375.954537] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1376.948532] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1376.949114] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg e45697575b964fbcae556414b148a677 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1376.967568] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e45697575b964fbcae556414b148a677 [ 1377.929035] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1377.929403] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg b40aded902774989ab289689a3252c2b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1377.941249] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b40aded902774989ab289689a3252c2b [ 1377.942253] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1377.942471] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1377.942637] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1377.942792] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61649) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1377.943843] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9354d353-43b6-4169-a707-fb63d5974200 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.953751] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f147b1a9-df69-42b3-9f16-08e3a2b65fc2 {{(pid=61649) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.967234] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d9c4420-54e8-4d89-bd67-219a804e299f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.973329] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1458a51b-cd2b-465f-ac2e-58c66c64f333 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.001701] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181775MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61649) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1378.001872] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1378.002064] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1378.002828] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 232c2420a0f143dfb5b10f2892e6a415 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1378.036026] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 232c2420a0f143dfb5b10f2892e6a415 [ 1378.039744] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 4d42ce8dfacc43d7ae30d7d659ba45f1 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1378.048870] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4d42ce8dfacc43d7ae30d7d659ba45f1 [ 1378.066093] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance c9fe1bfe-e813-43e9-9668-b813416ee27b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1378.066248] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 9cdd96c2-2837-4cb3-855c-ecad727dd5d4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1378.066376] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 5dc4bde6-db61-47c2-a2b8-d2a5515b1525 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1378.066499] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 5730229a-fd0c-4df1-9059-cd6ed39e954c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1378.066618] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 0b0050ff-2714-4068-9956-089c6aa3eff1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1378.066734] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 0fb0aaae-b6d2-418d-81a9-74671f4b97c6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1378.066849] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 5f67180f-6b27-4487-8858-5f57fcffd041 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1378.066962] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance aa39503a-2342-421e-928f-35ec7c8e47fb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1378.067075] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance d6e8f17f-40c4-46e0-a900-d92d1da01ed8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1378.067188] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance b6243867-9546-4663-9d48-5c040537490b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1378.067712] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 31ac617d327b42aeac399bc09017ddd8 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1378.078392] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 31ac617d327b42aeac399bc09017ddd8 [ 1378.079173] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 4b87e74a-2408-466f-b1c2-68330c31fb9d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1378.079786] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 959006d5066b4808b66727f63bd42caa in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1378.089286] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 959006d5066b4808b66727f63bd42caa [ 1378.090085] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance ff225293-ad72-499a-9b5b-147d0bc40350 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1378.090565] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 9b6043f157c04bfa8ad11bc290601f8a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1378.099761] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9b6043f157c04bfa8ad11bc290601f8a [ 1378.100488] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 36d37cda-b987-4c5a-8af1-6eede009e61e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1378.100963] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 60d9ee6e8d484dc0b8879364c56b2b39 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1378.110973] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 60d9ee6e8d484dc0b8879364c56b2b39 [ 1378.111436] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 5f424618-f9b3-4e9a-898c-2d1a07476cc7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1378.111669] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1378.111813] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1378.265226] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2e77b8f-ffc2-4963-ab4f-9847e4919696 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.273003] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca6ff15e-9add-4b40-9b81-3e48d159f597 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.302357] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d89d6bc-f95b-4620-93eb-5ea030c06c3d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.308810] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2db836e4-f8af-405a-a344-452d9e8c2042 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.321319] env[61649]: DEBUG nova.compute.provider_tree [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1378.321819] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 9bb860213c46432ea53e53b302e6fc4a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1378.328642] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9bb860213c46432ea53e53b302e6fc4a [ 1378.329526] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1378.331999] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 98c1a4fd1b0842fc83c673f69bd5d0e0 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1378.342084] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 98c1a4fd1b0842fc83c673f69bd5d0e0 
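The resource-tracker records above report raw host capacity (phys_ram=196590MB, total_vcpus=48) alongside an unchanged Placement inventory for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 that carries per-resource-class reserved amounts and allocation ratios. A minimal sketch in plain Python (not Nova or Placement code) of the effective-capacity arithmetic those fields imply, assuming the usual Placement convention effective = (total - reserved) * allocation_ratio:

```python
# Sketch only: derive the allocatable capacity implied by the inventory
# record logged above. Assumed convention (standard for Placement):
#   effective = (total - reserved) * allocation_ratio
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for resource_class, inv in inventory.items():
    effective = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{resource_class}: {effective:g} allocatable")

# VCPU: 192 allocatable
# MEMORY_MB: 196078 allocatable
# DISK_GB: 400 allocatable
```

With the values logged above this yields 192 allocatable VCPUs against the 10 currently allocated, consistent with the 4.0 CPU allocation ratio and the 512 MB memory reservation shown in the inventory record.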
[ 1378.342668] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61649) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1378.342841] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.341s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1379.343007] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1381.316101] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-ea766373-24af-46c1-8af8-40ba6cb745fc tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg a669162a67124382b6be683787ef4a6c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1381.324672] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a669162a67124382b6be683787ef4a6c [ 1381.325541] env[61649]: DEBUG oslo_concurrency.lockutils [None req-ea766373-24af-46c1-8af8-40ba6cb745fc tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Acquiring lock "aa39503a-2342-421e-928f-35ec7c8e47fb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1405.746600] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-20fe40a7-c967-4497-ac62-7cb842ad6dd3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Expecting reply to msg 0079ae23e46e4028933c9a1329dbafd2 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1405.755727] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0079ae23e46e4028933c9a1329dbafd2 [ 1405.756211] env[61649]: DEBUG oslo_concurrency.lockutils [None req-20fe40a7-c967-4497-ac62-7cb842ad6dd3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Acquiring lock "d6e8f17f-40c4-46e0-a900-d92d1da01ed8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1410.161252] env[61649]: WARNING oslo_vmware.rw_handles [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1410.161252] env[61649]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1410.161252] env[61649]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1410.161252] env[61649]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1410.161252] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 
1375, in getresponse [ 1410.161252] env[61649]: ERROR oslo_vmware.rw_handles response.begin() [ 1410.161252] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1410.161252] env[61649]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1410.161252] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1410.161252] env[61649]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1410.161252] env[61649]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1410.161252] env[61649]: ERROR oslo_vmware.rw_handles [ 1410.162120] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Downloaded image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to vmware_temp/88c5ed7a-2bb2-46a9-bb82-3029e495badb/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1410.163690] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Caching image {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1410.163944] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Copying Virtual Disk [datastore1] vmware_temp/88c5ed7a-2bb2-46a9-bb82-3029e495badb/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk to [datastore1] vmware_temp/88c5ed7a-2bb2-46a9-bb82-3029e495badb/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk {{(pid=61649) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1410.164239] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1769e264-654f-4505-9c1c-122ea47ab4be {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.171557] env[61649]: DEBUG oslo_vmware.api [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Waiting for the task: (returnval){ [ 1410.171557] env[61649]: value = "task-158236" [ 1410.171557] env[61649]: _type = "Task" [ 1410.171557] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1410.179315] env[61649]: DEBUG oslo_vmware.api [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Task: {'id': task-158236, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.682174] env[61649]: DEBUG oslo_vmware.exceptions [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Fault InvalidArgument not matched. {{(pid=61649) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1410.682414] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1410.682991] env[61649]: ERROR nova.compute.manager [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1410.682991] env[61649]: Faults: ['InvalidArgument'] [ 1410.682991] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Traceback (most recent call last): [ 1410.682991] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1410.682991] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] yield resources [ 1410.682991] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1410.682991] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] self.driver.spawn(context, instance, image_meta, [ 1410.682991] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1410.682991] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1410.682991] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1410.682991] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] self._fetch_image_if_missing(context, vi) [ 1410.682991] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1410.683377] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] image_cache(vi, tmp_image_ds_loc) [ 1410.683377] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1410.683377] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] vm_util.copy_virtual_disk( [ 1410.683377] env[61649]: ERROR nova.compute.manager [instance: 
c9fe1bfe-e813-43e9-9668-b813416ee27b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1410.683377] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] session._wait_for_task(vmdk_copy_task) [ 1410.683377] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1410.683377] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] return self.wait_for_task(task_ref) [ 1410.683377] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1410.683377] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] return evt.wait() [ 1410.683377] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1410.683377] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] result = hub.switch() [ 1410.683377] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1410.683377] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] return self.greenlet.switch() [ 1410.683807] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1410.683807] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] self.f(*self.args, **self.kw) [ 1410.683807] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1410.683807] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] raise exceptions.translate_fault(task_info.error) [ 1410.683807] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1410.683807] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Faults: ['InvalidArgument'] [ 1410.683807] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] [ 1410.683807] env[61649]: INFO nova.compute.manager [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Terminating instance [ 1410.684965] env[61649]: DEBUG oslo_concurrency.lockutils [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1410.685176] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 
tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1410.685414] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-219b8cee-645b-4651-bf5d-b50576eccd7f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.687747] env[61649]: DEBUG nova.compute.manager [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1410.688020] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1410.688677] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-344045b9-4885-4588-95e6-75a7a28afec3 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.695232] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Unregistering the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1410.695443] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e74a1fbe-f26e-481f-92e5-07721400a5eb {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.697514] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1410.697681] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61649) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1410.698586] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9f51377-0e0f-4d14-a58d-38b7e5bbc126 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.703756] env[61649]: DEBUG oslo_vmware.api [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Waiting for the task: (returnval){ [ 1410.703756] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52ceb47e-d22d-6387-0c9b-fe953cd5272c" [ 1410.703756] env[61649]: _type = "Task" [ 1410.703756] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1410.710353] env[61649]: DEBUG oslo_vmware.api [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52ceb47e-d22d-6387-0c9b-fe953cd5272c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.755625] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Unregistered the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1410.755824] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Deleting contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1410.756020] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Deleting the datastore file [datastore1] c9fe1bfe-e813-43e9-9668-b813416ee27b {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1410.756290] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ac8006f4-506f-461c-b008-fe32723db511 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.763890] env[61649]: DEBUG oslo_vmware.api [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Waiting for the task: (returnval){ [ 1410.763890] env[61649]: value = "task-158238" [ 1410.763890] env[61649]: _type = "Task" [ 1410.763890] env[61649]: } to complete. 
{{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1410.771480] env[61649]: DEBUG oslo_vmware.api [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Task: {'id': task-158238, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.213991] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Preparing fetch location {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1411.214338] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Creating directory with path [datastore1] vmware_temp/a133ce5a-54cd-4a92-afbe-dda577c086f1/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1411.214522] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e1dafd31-398a-45a8-82a9-b45255ecd49f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.224831] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Created directory with path [datastore1] vmware_temp/a133ce5a-54cd-4a92-afbe-dda577c086f1/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1411.225019] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Fetch image to [datastore1] vmware_temp/a133ce5a-54cd-4a92-afbe-dda577c086f1/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1411.225178] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to [datastore1] vmware_temp/a133ce5a-54cd-4a92-afbe-dda577c086f1/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1411.225865] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f4fe7f0-91cf-4cd1-be93-4e40b7d86d8e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.231985] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89d97fab-0036-45ee-93f9-03b36516ffa1 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.240487] env[61649]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3cb3350-5e56-46b2-8238-922fbad1b6d1 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.273277] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47561c52-b014-4db7-abcc-7fd4391fc2a1 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.280049] env[61649]: DEBUG oslo_vmware.api [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Task: {'id': task-158238, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076457} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1411.281550] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Deleted the datastore file {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1411.281745] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Deleted contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1411.281918] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1411.282091] env[61649]: INFO nova.compute.manager [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Took 0.59 seconds to destroy the instance on the hypervisor. 
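The CopyVirtualDisk_Task failure above is surfaced through oslo.vmware's task polling: the client repeatedly reads the task's info (the "progress is 0%" lines), and once the task lands in the error state the fault is translated and raised, which is the `raise exceptions.translate_fault(task_info.error)` frame in the traceback. Below is a minimal sketch of that polling pattern, assuming a hypothetical `get_task_info(task)` accessor that returns an object with `state`, `progress`, and `error_msg` attributes; the real loop in oslo_vmware.api is driven by an eventlet looping call rather than `time.sleep()`.

    import time

    class TaskFailed(Exception):
        """Raised when a vCenter-style task ends in the error state."""

    def wait_for_task(get_task_info, task, poll_interval=0.5, timeout=300.0):
        # get_task_info(task) is assumed to return an object with .state in
        # ('queued', 'running', 'success', 'error'), plus .progress and .error_msg.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info(task)
            if info.state == 'success':
                return info
            if info.state == 'error':
                # Counterpart of raise exceptions.translate_fault(task_info.error)
                # in the traceback above.
                raise TaskFailed(info.error_msg)
            # Task still queued/running; the log reports this as "progress is N%".
            time.sleep(poll_interval)
        raise TimeoutError('task %r did not complete within %.0fs' % (task, timeout))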
[ 1411.283865] env[61649]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-6a0c36ce-a811-4e59-81fc-2a3c6822879b {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.285746] env[61649]: DEBUG nova.compute.claims [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Aborting claim: {{(pid=61649) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1411.285923] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1411.286134] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1411.288175] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Expecting reply to msg 7a9884f2491b420a8168b9632cbd080b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1411.307913] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1411.319662] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7a9884f2491b420a8168b9632cbd080b [ 1411.359902] env[61649]: DEBUG oslo_vmware.rw_handles [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a133ce5a-54cd-4a92-afbe-dda577c086f1/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1411.423365] env[61649]: DEBUG oslo_vmware.rw_handles [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Completed reading data from the image iterator. 
{{(pid=61649) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1411.423567] env[61649]: DEBUG oslo_vmware.rw_handles [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a133ce5a-54cd-4a92-afbe-dda577c086f1/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1411.539947] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d3807a4-930b-4e27-95b8-2a72ddcf2be5 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.547179] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f357335f-ad05-4ffc-9c32-fc2aedb12943 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.577905] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe26dfd2-9955-400c-b5c2-14aefe4b2843 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.584682] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b2313e5-d6ac-4056-979a-cacce6fca651 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.597498] env[61649]: DEBUG nova.compute.provider_tree [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1411.597984] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Expecting reply to msg 3ca7a9c2b20f473da6b1c1788e25879c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1411.606053] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3ca7a9c2b20f473da6b1c1788e25879c [ 1411.606952] env[61649]: DEBUG nova.scheduler.client.report [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1411.609135] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 
tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Expecting reply to msg 9da50191a3c34131b7609a785f585a53 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1411.619539] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9da50191a3c34131b7609a785f585a53 [ 1411.620223] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.334s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1411.620740] env[61649]: ERROR nova.compute.manager [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1411.620740] env[61649]: Faults: ['InvalidArgument'] [ 1411.620740] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Traceback (most recent call last): [ 1411.620740] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1411.620740] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] self.driver.spawn(context, instance, image_meta, [ 1411.620740] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1411.620740] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1411.620740] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1411.620740] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] self._fetch_image_if_missing(context, vi) [ 1411.620740] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1411.620740] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] image_cache(vi, tmp_image_ds_loc) [ 1411.620740] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1411.621159] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] vm_util.copy_virtual_disk( [ 1411.621159] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1411.621159] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] session._wait_for_task(vmdk_copy_task) [ 1411.621159] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1411.621159] env[61649]: ERROR 
nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] return self.wait_for_task(task_ref) [ 1411.621159] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1411.621159] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] return evt.wait() [ 1411.621159] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1411.621159] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] result = hub.switch() [ 1411.621159] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1411.621159] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] return self.greenlet.switch() [ 1411.621159] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1411.621159] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] self.f(*self.args, **self.kw) [ 1411.621522] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1411.621522] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] raise exceptions.translate_fault(task_info.error) [ 1411.621522] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1411.621522] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Faults: ['InvalidArgument'] [ 1411.621522] env[61649]: ERROR nova.compute.manager [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] [ 1411.621522] env[61649]: DEBUG nova.compute.utils [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] VimFaultException {{(pid=61649) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1411.622777] env[61649]: DEBUG nova.compute.manager [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Build of instance c9fe1bfe-e813-43e9-9668-b813416ee27b was re-scheduled: A specified parameter was not correct: fileType [ 1411.622777] env[61649]: Faults: ['InvalidArgument'] {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1411.623156] env[61649]: DEBUG nova.compute.manager [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Unplugging VIFs for instance {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1411.623335] env[61649]: DEBUG 
nova.compute.manager [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1411.623507] env[61649]: DEBUG nova.compute.manager [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1411.623673] env[61649]: DEBUG nova.network.neutron [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1411.997435] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Expecting reply to msg 25fcede1b06c467c85b278d2b1fed05c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1412.005907] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 25fcede1b06c467c85b278d2b1fed05c [ 1412.006494] env[61649]: DEBUG nova.network.neutron [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1412.006939] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Expecting reply to msg 13ae89ab60214d3d8787a5022981b87c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1412.016181] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 13ae89ab60214d3d8787a5022981b87c [ 1412.016782] env[61649]: INFO nova.compute.manager [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Took 0.39 seconds to deallocate network for instance. 
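The inventory payload logged for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 is what Placement uses to answer the scheduler: for each resource class the usable capacity is (total - reserved) * allocation_ratio, and max_unit caps what a single allocation may claim. A small self-contained illustration using the exact figures from the log (the `capacity` helper name is ours, not a Placement API):

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'min_unit': 1,
                      'max_unit': 16,    'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'min_unit': 1,
                      'max_unit': 197,   'step_size': 1, 'allocation_ratio': 1.0},
    }

    def capacity(record):
        # Total amount the scheduler may hand out for one resource class.
        return int((record['total'] - record['reserved']) * record['allocation_ratio'])

    for rc, record in sorted(inventory.items()):
        print('%s: capacity %d, max %d per allocation'
              % (rc, capacity(record), record['max_unit']))
    # DISK_GB: capacity 400, max 197 per allocation
    # MEMORY_MB: capacity 196078, max 65530 per allocation
    # VCPU: capacity 192, max 16 per allocation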
[ 1412.018738] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Expecting reply to msg 5eb46b549f504d43af6df03893f7d562 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1412.049022] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5eb46b549f504d43af6df03893f7d562 [ 1412.051858] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Expecting reply to msg 8b37c064c251402a80cf8aa9dfdff1e6 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1412.081108] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8b37c064c251402a80cf8aa9dfdff1e6 [ 1412.102312] env[61649]: INFO nova.scheduler.client.report [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Deleted allocations for instance c9fe1bfe-e813-43e9-9668-b813416ee27b [ 1412.108313] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Expecting reply to msg 6c2ca5896d83414d8f2a3159f04931e5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1412.119512] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6c2ca5896d83414d8f2a3159f04931e5 [ 1412.120038] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c8787f7e-97ee-4d55-860f-8a9822c65104 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Lock "c9fe1bfe-e813-43e9-9668-b813416ee27b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 593.835s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1412.120582] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg 3c8f3a12309246a496784de999e2d894 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1412.121281] env[61649]: DEBUG oslo_concurrency.lockutils [None req-07795c89-5627-4ba4-b61b-8c17429c82b9 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Lock "c9fe1bfe-e813-43e9-9668-b813416ee27b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 397.480s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1412.121498] env[61649]: DEBUG oslo_concurrency.lockutils [None req-07795c89-5627-4ba4-b61b-8c17429c82b9 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Acquiring lock "c9fe1bfe-e813-43e9-9668-b813416ee27b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1412.121713] env[61649]: DEBUG oslo_concurrency.lockutils [None 
req-07795c89-5627-4ba4-b61b-8c17429c82b9 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Lock "c9fe1bfe-e813-43e9-9668-b813416ee27b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1412.121871] env[61649]: DEBUG oslo_concurrency.lockutils [None req-07795c89-5627-4ba4-b61b-8c17429c82b9 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Lock "c9fe1bfe-e813-43e9-9668-b813416ee27b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1412.123803] env[61649]: INFO nova.compute.manager [None req-07795c89-5627-4ba4-b61b-8c17429c82b9 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Terminating instance [ 1412.125468] env[61649]: DEBUG nova.compute.manager [None req-07795c89-5627-4ba4-b61b-8c17429c82b9 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1412.125682] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-07795c89-5627-4ba4-b61b-8c17429c82b9 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1412.126143] env[61649]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-038002ad-1ff1-4452-bd26-2d736fd82f08 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.134930] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30a3e5ff-6fa6-45c6-9e5a-154c3a35dc85 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.145515] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3c8f3a12309246a496784de999e2d894 [ 1412.145991] env[61649]: DEBUG nova.compute.manager [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Starting instance... 
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1412.147737] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg cc1f28668f944209a493f87ff82399cd in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1412.164980] env[61649]: WARNING nova.virt.vmwareapi.vmops [None req-07795c89-5627-4ba4-b61b-8c17429c82b9 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c9fe1bfe-e813-43e9-9668-b813416ee27b could not be found. [ 1412.165163] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-07795c89-5627-4ba4-b61b-8c17429c82b9 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1412.165334] env[61649]: INFO nova.compute.manager [None req-07795c89-5627-4ba4-b61b-8c17429c82b9 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1412.165579] env[61649]: DEBUG oslo.service.loopingcall [None req-07795c89-5627-4ba4-b61b-8c17429c82b9 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1412.165801] env[61649]: DEBUG nova.compute.manager [-] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1412.165900] env[61649]: DEBUG nova.network.neutron [-] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1412.175764] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cc1f28668f944209a493f87ff82399cd [ 1412.182732] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg e21e8e6943d04ce4a6c00ee6ba680e96 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1412.188740] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e21e8e6943d04ce4a6c00ee6ba680e96 [ 1412.189058] env[61649]: DEBUG nova.network.neutron [-] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1412.189439] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg b6b4b7c594da424bb69dafe83ce02fb5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1412.190689] env[61649]: DEBUG oslo_concurrency.lockutils [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1412.190918] env[61649]: DEBUG oslo_concurrency.lockutils [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1412.192496] env[61649]: INFO nova.compute.claims [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1412.194004] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg b7c89182ea0d4be399e1128547316523 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1412.197397] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b6b4b7c594da424bb69dafe83ce02fb5 [ 1412.197822] env[61649]: INFO nova.compute.manager [-] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] Took 0.03 seconds to deallocate network for instance. 
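The acquire/release pairs in these entries ("acquired ... waited 0.000s", '"released" ... held 0.164s') come from oslo.concurrency's lockutils, which serializes operations such as `do_terminate_instance` on a per-instance lock name and reports how long each caller waited for and then held the lock. A rough sketch of that bookkeeping with a plain `threading.Lock`; the `timed_lock` helper and its unsynchronized `_locks` registry are illustrative stand-ins, not the oslo implementation.

    import contextlib
    import threading
    import time

    _locks = {}  # name -> Lock; registry lookup is not race-free, illustration only

    @contextlib.contextmanager
    def timed_lock(name, by):
        lock = _locks.setdefault(name, threading.Lock())
        t0 = time.monotonic()
        with lock:
            print('Lock "%s" acquired by "%s" :: waited %.3fs'
                  % (name, by, time.monotonic() - t0))
            t1 = time.monotonic()
            try:
                yield
            finally:
                print('Lock "%s" "released" by "%s" :: held %.3fs'
                      % (name, by, time.monotonic() - t1))

    # Usage mirroring the per-instance terminate lock in the log:
    with timed_lock('c9fe1bfe-e813-43e9-9668-b813416ee27b', 'do_terminate_instance'):
        pass  # destroy the instance while holding its lock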
[ 1412.201072] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-07795c89-5627-4ba4-b61b-8c17429c82b9 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Expecting reply to msg 5eebaabab6ba4c66818cafa1afa16991 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1412.226924] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b7c89182ea0d4be399e1128547316523 [ 1412.228549] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg a6b8a302f031491ab964e238097094bc in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1412.229524] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5eebaabab6ba4c66818cafa1afa16991 [ 1412.239794] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a6b8a302f031491ab964e238097094bc [ 1412.245644] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-07795c89-5627-4ba4-b61b-8c17429c82b9 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Expecting reply to msg ac7239c0b3eb417e84c7540e565d950d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1412.282770] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ac7239c0b3eb417e84c7540e565d950d [ 1412.285519] env[61649]: DEBUG oslo_concurrency.lockutils [None req-07795c89-5627-4ba4-b61b-8c17429c82b9 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Lock "c9fe1bfe-e813-43e9-9668-b813416ee27b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.164s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1412.285836] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-07795c89-5627-4ba4-b61b-8c17429c82b9 tempest-ServersWithSpecificFlavorTestJSON-519795392 tempest-ServersWithSpecificFlavorTestJSON-519795392-project-member] Expecting reply to msg fcb69456c50e4db8a6f768b1fd1f287d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1412.286575] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "c9fe1bfe-e813-43e9-9668-b813416ee27b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 284.258s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1412.286760] env[61649]: INFO nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: c9fe1bfe-e813-43e9-9668-b813416ee27b] During sync_power_state the instance has a pending task (deleting). Skip. 
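The "pending task (deleting). Skip." entry shows the guard inside the periodic `_sync_power_states` task: while an instance has a `task_state` set, another operation owns it, so the power-state reconciliation backs off rather than fight the in-flight delete. A hedged sketch of that check; the `Instance` dataclass and `sync_power_state` function are stand-ins, not Nova's actual signatures.

    from dataclasses import dataclass
    from typing import Optional

    @dataclass
    class Instance:                      # stand-in for Nova's instance object
        uuid: str
        power_state: int
        task_state: Optional[str] = None

    def sync_power_state(instance, driver_power_state):
        """Reconcile the recorded power state with the hypervisor, unless busy."""
        if instance.task_state is not None:
            print('During sync_power_state the instance has a pending task '
                  '(%s). Skip.' % instance.task_state)
            return
        if instance.power_state != driver_power_state:
            instance.power_state = driver_power_state  # trust the driver's view

    sync_power_state(
        Instance('c9fe1bfe-e813-43e9-9668-b813416ee27b', power_state=1,
                 task_state='deleting'),
        driver_power_state=0)
    # -> During sync_power_state the instance has a pending task (deleting). Skip.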
[ 1412.287313] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "c9fe1bfe-e813-43e9-9668-b813416ee27b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1412.294910] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fcb69456c50e4db8a6f768b1fd1f287d [ 1412.389478] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9091f70f-9a3a-48f5-804f-6b409642b24c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.397124] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8d29930-6ec9-439e-ae39-c6cabf90343c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.428174] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a088bb0c-7f59-41f1-87d9-74e7f9e980c9 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.435290] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fed50d7f-95e9-416b-aef9-741299cdec97 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.448204] env[61649]: DEBUG nova.compute.provider_tree [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1412.448686] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg d138b1b9eeee43598a2ba8cd20efff87 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1412.457850] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d138b1b9eeee43598a2ba8cd20efff87 [ 1412.457850] env[61649]: DEBUG nova.scheduler.client.report [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1412.459912] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg d96531f8fcf64201919dc7f541037dd6 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1412.470683] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC 
response for msg d96531f8fcf64201919dc7f541037dd6 [ 1412.471415] env[61649]: DEBUG oslo_concurrency.lockutils [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.280s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1412.471933] env[61649]: DEBUG nova.compute.manager [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Start building networks asynchronously for instance. {{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1412.473621] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg 0c6c805e6f68480c987210c6db0b4fe1 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1412.501340] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0c6c805e6f68480c987210c6db0b4fe1 [ 1412.502947] env[61649]: DEBUG nova.compute.utils [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Using /dev/sd instead of None {{(pid=61649) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1412.504052] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg 79361801f34342fea4cdcd5378a588fa in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1412.505111] env[61649]: DEBUG nova.compute.manager [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Allocating IP information in the background. {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1412.505380] env[61649]: DEBUG nova.network.neutron [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] allocate_for_instance() {{(pid=61649) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1412.513081] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 79361801f34342fea4cdcd5378a588fa [ 1412.513701] env[61649]: DEBUG nova.compute.manager [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Start building block device mappings for instance. 
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1412.515271] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg 15e73b9f11aa48a58bdaaa581dab47ca in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1412.542449] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 15e73b9f11aa48a58bdaaa581dab47ca [ 1412.545046] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg d867ceb021f94113b8b1b5fcc15f0a47 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1412.548393] env[61649]: DEBUG nova.policy [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ed97b90b998c477eae669b6132359808', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '911980132e374bbd9e861e2fba9466f8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61649) authorize /opt/stack/nova/nova/policy.py:203}} [ 1412.576637] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d867ceb021f94113b8b1b5fcc15f0a47 [ 1412.577772] env[61649]: DEBUG nova.compute.manager [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Start spawning the instance on the hypervisor. 
{{(pid=61649) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1412.598290] env[61649]: DEBUG nova.virt.hardware [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-04-02T10:03:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-04-02T10:03:42Z,direct_url=,disk_format='vmdk',id=d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d9c1bd4c77004c3cb8e42232cad1896c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-04-02T10:03:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1412.598530] env[61649]: DEBUG nova.virt.hardware [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Flavor limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1412.598685] env[61649]: DEBUG nova.virt.hardware [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Image limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1412.598864] env[61649]: DEBUG nova.virt.hardware [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Flavor pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1412.599008] env[61649]: DEBUG nova.virt.hardware [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Image pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1412.599156] env[61649]: DEBUG nova.virt.hardware [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1412.599418] env[61649]: DEBUG nova.virt.hardware [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1412.599552] env[61649]: DEBUG nova.virt.hardware [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1412.599726] env[61649]: DEBUG nova.virt.hardware [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 
tempest-ImagesTestJSON-1732047265-project-member] Got 1 possible topologies {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1412.599890] env[61649]: DEBUG nova.virt.hardware [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1412.600076] env[61649]: DEBUG nova.virt.hardware [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1412.600941] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3242e28-2ada-4a03-888f-b538d453590a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.608583] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be748030-7089-4a55-8a28-56a627cf768e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.875244] env[61649]: DEBUG nova.network.neutron [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Successfully created port: 118d2874-57b0-4121-a999-9c45097feea7 {{(pid=61649) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1413.406517] env[61649]: DEBUG nova.network.neutron [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Successfully updated port: 118d2874-57b0-4121-a999-9c45097feea7 {{(pid=61649) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1413.406517] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg f7795069154b4e68817ce6f9025bc0bc in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1413.416045] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f7795069154b4e68817ce6f9025bc0bc [ 1413.416699] env[61649]: DEBUG oslo_concurrency.lockutils [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Acquiring lock "refresh_cache-4b87e74a-2408-466f-b1c2-68330c31fb9d" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1413.416830] env[61649]: DEBUG oslo_concurrency.lockutils [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Acquired lock "refresh_cache-4b87e74a-2408-466f-b1c2-68330c31fb9d" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1413.416972] env[61649]: DEBUG nova.network.neutron [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Building network 
info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1413.417372] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg e21e66045e064f7497c1f4654251ba2b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1413.424427] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e21e66045e064f7497c1f4654251ba2b [ 1413.458496] env[61649]: DEBUG nova.network.neutron [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Instance cache missing network info. {{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1413.640757] env[61649]: DEBUG nova.network.neutron [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Updating instance_info_cache with network_info: [{"id": "118d2874-57b0-4121-a999-9c45097feea7", "address": "fa:16:3e:9c:73:f3", "network": {"id": "cf910fb5-25a3-4ac3-81ee-4f21af78b736", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1804430108-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "911980132e374bbd9e861e2fba9466f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap118d2874-57", "ovs_interfaceid": "118d2874-57b0-4121-a999-9c45097feea7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1413.641259] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg 048e579593044fb9b01e8d298f3eadcd in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1413.653318] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 048e579593044fb9b01e8d298f3eadcd [ 1413.653938] env[61649]: DEBUG oslo_concurrency.lockutils [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Releasing lock "refresh_cache-4b87e74a-2408-466f-b1c2-68330c31fb9d" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1413.654145] env[61649]: DEBUG nova.compute.manager [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Instance network_info: |[{"id": 
"118d2874-57b0-4121-a999-9c45097feea7", "address": "fa:16:3e:9c:73:f3", "network": {"id": "cf910fb5-25a3-4ac3-81ee-4f21af78b736", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1804430108-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "911980132e374bbd9e861e2fba9466f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap118d2874-57", "ovs_interfaceid": "118d2874-57b0-4121-a999-9c45097feea7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1413.654521] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9c:73:f3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9aa05ef8-c7bb-4af5-983f-bfa0f3f88223', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '118d2874-57b0-4121-a999-9c45097feea7', 'vif_model': 'vmxnet3'}] {{(pid=61649) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1413.661785] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Creating folder: Project (911980132e374bbd9e861e2fba9466f8). Parent ref: group-v51588. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1413.662277] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f85325ce-8e8b-48c4-9ec1-493d590f2a57 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.675245] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Created folder: Project (911980132e374bbd9e861e2fba9466f8) in parent group-v51588. [ 1413.675437] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Creating folder: Instances. Parent ref: group-v51677. 
{{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1413.675668] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-957ad649-e072-4b98-9a96-6a42415681ff {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1413.684803] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Created folder: Instances in parent group-v51677.
[ 1413.685043] env[61649]: DEBUG oslo.service.loopingcall [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1413.685213] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Creating VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 1413.685407] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-18610723-e52b-4120-bc79-cca76c143790 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1413.705374] env[61649]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 1413.705374] env[61649]: value = "task-158241"
[ 1413.705374] env[61649]: _type = "Task"
[ 1413.705374] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1413.716538] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158241, 'name': CreateVM_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
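The CreateVM_Task records above show the wait-for-task pattern the VMware driver leans on throughout this log: nova hands the call to an oslo.service looping call, which polls the vCenter task (here task-158241) until it reports success or error. A minimal sketch of that polling loop, assuming oslo.service is installed; FakeTask is a stand-in for a live vSphere task handle, not part of nova or oslo.vmware:

    # Sketch of the wait-for-task pattern visible above: a fixed-interval
    # looping call polls task state until the task finishes, and signals
    # completion by raising LoopingCallDone. FakeTask is a stand-in for a
    # real vSphere task handle.
    from oslo_service import loopingcall


    class FakeTask:
        """Pretend vCenter task that succeeds after a few polls."""

        def __init__(self, polls_needed=3):
            self._polls = 0
            self._polls_needed = polls_needed

        def info(self):
            self._polls += 1
            if self._polls >= self._polls_needed:
                return {'state': 'success', 'result': 'vm-123'}
            return {'state': 'running', 'progress': 30 * self._polls}


    def wait_for_task(task):
        def _poll():
            info = task.info()
            if info['state'] == 'success':
                # Stops the loop; .wait() below returns this value.
                raise loopingcall.LoopingCallDone(info['result'])
            if info['state'] == 'error':
                raise RuntimeError('task failed')
            print("progress is %d%%." % info['progress'])

        timer = loopingcall.FixedIntervalLoopingCall(_poll)
        # oslo.vmware polls roughly twice a second by default.
        return timer.start(interval=0.5).wait()


    print(wait_for_task(FakeTask()))  # prints progress, then 'vm-123'

The LoopingCallDone exception carries the task result back out of the poller, which is why a successful CreateVM_Task simply stops appearing in the log once it completes.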
[ 1414.049171] env[61649]: DEBUG nova.compute.manager [req-95a0e4a8-53a4-4086-82d4-c3c27781ee55 req-17402da5-92d2-4d8c-8bfb-79327f1d5871 service nova] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Received event network-vif-plugged-118d2874-57b0-4121-a999-9c45097feea7 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}}
[ 1414.049429] env[61649]: DEBUG oslo_concurrency.lockutils [req-95a0e4a8-53a4-4086-82d4-c3c27781ee55 req-17402da5-92d2-4d8c-8bfb-79327f1d5871 service nova] Acquiring lock "4b87e74a-2408-466f-b1c2-68330c31fb9d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1414.049672] env[61649]: DEBUG oslo_concurrency.lockutils [req-95a0e4a8-53a4-4086-82d4-c3c27781ee55 req-17402da5-92d2-4d8c-8bfb-79327f1d5871 service nova] Lock "4b87e74a-2408-466f-b1c2-68330c31fb9d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1414.049840] env[61649]: DEBUG oslo_concurrency.lockutils [req-95a0e4a8-53a4-4086-82d4-c3c27781ee55 req-17402da5-92d2-4d8c-8bfb-79327f1d5871 service nova] Lock "4b87e74a-2408-466f-b1c2-68330c31fb9d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1414.050005] env[61649]: DEBUG nova.compute.manager [req-95a0e4a8-53a4-4086-82d4-c3c27781ee55 req-17402da5-92d2-4d8c-8bfb-79327f1d5871 service nova] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] No waiting events found dispatching network-vif-plugged-118d2874-57b0-4121-a999-9c45097feea7 {{(pid=61649) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 1414.050166] env[61649]: WARNING nova.compute.manager [req-95a0e4a8-53a4-4086-82d4-c3c27781ee55 req-17402da5-92d2-4d8c-8bfb-79327f1d5871 service nova] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Received unexpected event network-vif-plugged-118d2874-57b0-4121-a999-9c45097feea7 for instance with vm_state building and task_state spawning.
[ 1414.050385] env[61649]: DEBUG nova.compute.manager [req-95a0e4a8-53a4-4086-82d4-c3c27781ee55 req-17402da5-92d2-4d8c-8bfb-79327f1d5871 service nova] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Received event network-changed-118d2874-57b0-4121-a999-9c45097feea7 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}}
[ 1414.050534] env[61649]: DEBUG nova.compute.manager [req-95a0e4a8-53a4-4086-82d4-c3c27781ee55 req-17402da5-92d2-4d8c-8bfb-79327f1d5871 service nova] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Refreshing instance network info cache due to event network-changed-118d2874-57b0-4121-a999-9c45097feea7.
{{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 1414.050725] env[61649]: DEBUG oslo_concurrency.lockutils [req-95a0e4a8-53a4-4086-82d4-c3c27781ee55 req-17402da5-92d2-4d8c-8bfb-79327f1d5871 service nova] Acquiring lock "refresh_cache-4b87e74a-2408-466f-b1c2-68330c31fb9d" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1414.050860] env[61649]: DEBUG oslo_concurrency.lockutils [req-95a0e4a8-53a4-4086-82d4-c3c27781ee55 req-17402da5-92d2-4d8c-8bfb-79327f1d5871 service nova] Acquired lock "refresh_cache-4b87e74a-2408-466f-b1c2-68330c31fb9d" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1414.051015] env[61649]: DEBUG nova.network.neutron [req-95a0e4a8-53a4-4086-82d4-c3c27781ee55 req-17402da5-92d2-4d8c-8bfb-79327f1d5871 service nova] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Refreshing network info cache for port 118d2874-57b0-4121-a999-9c45097feea7 {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 1414.051488] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-95a0e4a8-53a4-4086-82d4-c3c27781ee55 req-17402da5-92d2-4d8c-8bfb-79327f1d5871 service nova] Expecting reply to msg 7626eb33c1ee48a19384c29732b6db58 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1414.058459] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7626eb33c1ee48a19384c29732b6db58 [ 1414.214834] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158241, 'name': CreateVM_Task, 'duration_secs': 0.258142} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1414.214996] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Created VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1414.215723] env[61649]: DEBUG oslo_concurrency.lockutils [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1414.215919] env[61649]: DEBUG oslo_concurrency.lockutils [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1414.216249] env[61649]: DEBUG oslo_concurrency.lockutils [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1414.216496] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e87c3e16-a923-4a95-863a-12ed3094d694 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.221084] env[61649]: DEBUG oslo_vmware.api [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 
tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Waiting for the task: (returnval){ [ 1414.221084] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]524a3bf8-c735-e794-b132-cd0e0510ba0f" [ 1414.221084] env[61649]: _type = "Task" [ 1414.221084] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.228546] env[61649]: DEBUG oslo_vmware.api [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]524a3bf8-c735-e794-b132-cd0e0510ba0f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.275477] env[61649]: DEBUG nova.network.neutron [req-95a0e4a8-53a4-4086-82d4-c3c27781ee55 req-17402da5-92d2-4d8c-8bfb-79327f1d5871 service nova] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Updated VIF entry in instance network info cache for port 118d2874-57b0-4121-a999-9c45097feea7. {{(pid=61649) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 1414.275820] env[61649]: DEBUG nova.network.neutron [req-95a0e4a8-53a4-4086-82d4-c3c27781ee55 req-17402da5-92d2-4d8c-8bfb-79327f1d5871 service nova] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Updating instance_info_cache with network_info: [{"id": "118d2874-57b0-4121-a999-9c45097feea7", "address": "fa:16:3e:9c:73:f3", "network": {"id": "cf910fb5-25a3-4ac3-81ee-4f21af78b736", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1804430108-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "911980132e374bbd9e861e2fba9466f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap118d2874-57", "ovs_interfaceid": "118d2874-57b0-4121-a999-9c45097feea7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1414.276348] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-95a0e4a8-53a4-4086-82d4-c3c27781ee55 req-17402da5-92d2-4d8c-8bfb-79327f1d5871 service nova] Expecting reply to msg bf125db11612462696b2ade307f96a8d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1414.284784] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bf125db11612462696b2ade307f96a8d [ 1414.285373] env[61649]: DEBUG oslo_concurrency.lockutils [req-95a0e4a8-53a4-4086-82d4-c3c27781ee55 req-17402da5-92d2-4d8c-8bfb-79327f1d5871 service nova] Releasing lock "refresh_cache-4b87e74a-2408-466f-b1c2-68330c31fb9d" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1414.730922] env[61649]: DEBUG oslo_concurrency.lockutils [None 
req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1414.731667] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Processing image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1414.732071] env[61649]: DEBUG oslo_concurrency.lockutils [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1423.067093] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Acquiring lock "4661732c-51dc-4a77-aa32-28049dbd5ad7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1423.067379] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Lock "4661732c-51dc-4a77-aa32-28049dbd5ad7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1425.161009] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9227507e-cf44-47c2-831d-9ef1a3f29e02 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Expecting reply to msg ad749e613bd9482ea19f5f6361d02276 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1425.169292] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ad749e613bd9482ea19f5f6361d02276 [ 1425.169762] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9227507e-cf44-47c2-831d-9ef1a3f29e02 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Acquiring lock "b6243867-9546-4663-9d48-5c040537490b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1430.929263] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1432.178977] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-984d959f-f346-4c6a-8a1a-6d36c724a9ef tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg 5d66459da80045288f6c2e7f6bac2185 in queue 
reply_17c3d98394d943e0a538ced2a50ef815 [ 1432.187948] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5d66459da80045288f6c2e7f6bac2185 [ 1432.188794] env[61649]: DEBUG oslo_concurrency.lockutils [None req-984d959f-f346-4c6a-8a1a-6d36c724a9ef tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Acquiring lock "4b87e74a-2408-466f-b1c2-68330c31fb9d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1433.923891] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1433.928522] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1433.928680] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61649) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 1434.929505] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1434.929766] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1435.929326] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1435.929530] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Cleaning up deleted instances with incomplete migration {{(pid=61649) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11255}} [ 1435.929859] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 434d08fa6c9748b0893c903c75932efe in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1435.936616] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 434d08fa6c9748b0893c903c75932efe [ 1436.936597] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1436.936948] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Starting heal instance info cache {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 1436.936948] env[61649]: DEBUG 
nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Rebuilding the list of instances to heal {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 1436.937467] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg ac76d84ffe7e4586b6aa0fade7f20048 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1436.954437] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ac76d84ffe7e4586b6aa0fade7f20048 [ 1436.956647] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1436.956788] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1436.956922] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1436.957048] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1436.957174] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1436.957296] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1436.957417] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1436.957537] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1436.957655] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: b6243867-9546-4663-9d48-5c040537490b] Skipping network cache update for instance because it is Building. 
{{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1436.957772] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1436.957891] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Didn't find any instances for network info cache update. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 1436.958364] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1437.929249] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1437.929627] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg a24d2b29bf474d6f83656c4cfbaef782 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1437.938345] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a24d2b29bf474d6f83656c4cfbaef782 [ 1437.939275] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1437.939505] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1437.939677] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1437.939831] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61649) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1437.940898] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28798507-a025-4195-8e3e-3e1cc2d9a70c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.949305] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0ec1806-634f-4214-ba77-0aedf7b6d920 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.962527] 
env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a79df3e-561e-48b9-ae39-969839f3380d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.968527] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20bc1d41-0e22-4d12-8979-a475194ee5f9 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.996051] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181759MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61649) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1437.996209] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1437.996401] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1437.997213] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 75ff2f16c2974797aefc038247a0f310 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1438.032281] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 75ff2f16c2974797aefc038247a0f310 [ 1438.036323] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 6bd0865966ba4f1799118ff08a7f1114 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1438.045245] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6bd0865966ba4f1799118ff08a7f1114 [ 1438.061858] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 9cdd96c2-2837-4cb3-855c-ecad727dd5d4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1438.062013] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 5dc4bde6-db61-47c2-a2b8-d2a5515b1525 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1438.062132] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 5730229a-fd0c-4df1-9059-cd6ed39e954c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1438.062236] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 0b0050ff-2714-4068-9956-089c6aa3eff1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1438.062348] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 0fb0aaae-b6d2-418d-81a9-74671f4b97c6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1438.062457] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 5f67180f-6b27-4487-8858-5f57fcffd041 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1438.062565] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance aa39503a-2342-421e-928f-35ec7c8e47fb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1438.062672] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance d6e8f17f-40c4-46e0-a900-d92d1da01ed8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1438.062777] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance b6243867-9546-4663-9d48-5c040537490b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1438.062892] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 4b87e74a-2408-466f-b1c2-68330c31fb9d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1438.063376] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 58e57a12867041bf8ae2de77e43ec9b0 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1438.072823] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 58e57a12867041bf8ae2de77e43ec9b0 [ 1438.073602] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance ff225293-ad72-499a-9b5b-147d0bc40350 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1438.074187] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 142a1a1525264c2e81bc922a3c5f65a9 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1438.083652] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 142a1a1525264c2e81bc922a3c5f65a9 [ 1438.084404] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 36d37cda-b987-4c5a-8af1-6eede009e61e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1438.084995] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg f5ca5142034c4f9792eef8b99a1b2b96 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1438.094401] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f5ca5142034c4f9792eef8b99a1b2b96 [ 1438.095027] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 5f424618-f9b3-4e9a-898c-2d1a07476cc7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1438.095462] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 1c94daff8ac840d9b3dfa23dfa7a5ecc in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1438.103895] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1c94daff8ac840d9b3dfa23dfa7a5ecc [ 1438.104487] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 4661732c-51dc-4a77-aa32-28049dbd5ad7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1438.104701] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1438.104844] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1438.258270] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea159668-f1af-498d-a151-79bc9c6aa9a1 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.265623] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1d9f5dc-d718-4a9e-8091-207b10060ca6 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.294336] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19c4d52d-0883-4b27-b776-2431f4b2a1cb {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.301587] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a91a5685-b609-4027-b9fb-2f74afe22358 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.315166] env[61649]: DEBUG nova.compute.provider_tree [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1438.315615] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 2fef6382538b42f4af0a3585edf78b3d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1438.323136] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2fef6382538b42f4af0a3585edf78b3d [ 1438.324038] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1438.326173] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg c1d50a3d99844869a450cfd27d97414e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1438.340669] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c1d50a3d99844869a450cfd27d97414e 
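The audit above can be cross-checked by hand: ten instances are listed as actively managed, each on the m1.nano flavor recorded earlier in the section (128MB RAM, 1 vCPU, 1GB root disk), and the reported inventory reserves 512MB of host RAM. Together these reproduce the tracker's final resource view exactly:

    # Cross-checking the tracker's 'Final resource view' above from the
    # ten m1.nano instances it lists (128MB RAM, 1 vCPU, 1GB disk each)
    # plus the 512MB reservation shown in the inventory data.
    instances = 10
    reserved_ram_mb = 512
    flavor = {'memory_mb': 128, 'vcpus': 1, 'root_gb': 1}

    used_ram_mb = reserved_ram_mb + instances * flavor['memory_mb']
    used_vcpus = instances * flavor['vcpus']
    used_disk_gb = instances * flavor['root_gb']

    # Matches used_ram=1792MB, used_disk=10GB, used_vcpus=10 in the log.
    assert (used_ram_mb, used_vcpus, used_disk_gb) == (1792, 10, 10)

The four instances reported as "scheduled to this compute host ... yet to start" are deliberately excluded from this count; until they spawn, their allocations exist only in placement.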
[ 1438.341320] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61649) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 1438.341498] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.345s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1438.928768] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1438.928888] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1438.929172] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 5a8213bf41084d6fb44c77e789f2fdf4 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1438.936053] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5a8213bf41084d6fb44c77e789f2fdf4
[ 1441.937473] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1441.937753] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Cleaning up deleted instances {{(pid=61649) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11217}}
[ 1441.938232] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 8000106fdb50495e9017a340009b51a5 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1441.948217] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8000106fdb50495e9017a340009b51a5
[ 1441.948741] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] There are 0 instances to clean {{(pid=61649) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11226}}
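The block above is oslo.service's periodic-task scheduler ticking over: each method registered on the compute manager is invoked in turn, and on this idle host most handlers find nothing to do ("There are 0 instances to clean"). A minimal sketch of how such tasks are declared, assuming oslo.config and oslo.service are installed; ExampleManager and both handlers are illustrative stand-ins, not Nova's real ComputeManager:

    # Minimal periodic-task registration in the oslo.service style seen
    # above: decorated methods are collected by PeriodicTasks and invoked
    # by run_periodic_tasks().
    from oslo_config import cfg
    from oslo_service import periodic_task


    class ExampleManager(periodic_task.PeriodicTasks):

        @periodic_task.periodic_task(spacing=10, run_immediately=True)
        def _poll_rescued_instances(self, context):
            print('Running periodic task _poll_rescued_instances')

        @periodic_task.periodic_task(spacing=60, run_immediately=True)
        def _run_pending_deletes(self, context):
            print('There are 0 instances to clean')


    manager = ExampleManager(cfg.ConfigOpts())
    # A service would call this from a timer loop; one tick demonstrates it.
    manager.run_periodic_tasks(context=None)

Without run_immediately=True a task with a spacing waits one full interval before its first run, which is why the real log shows these tasks appearing at staggered timestamps rather than all at once.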
[ 1460.117261] env[61649]: WARNING oslo_vmware.rw_handles [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 1460.117261] env[61649]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 1460.117261] env[61649]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 1460.117261] env[61649]: ERROR oslo_vmware.rw_handles self._conn.getresponse()
[ 1460.117261] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 1460.117261] env[61649]: ERROR oslo_vmware.rw_handles response.begin()
[ 1460.117261] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 1460.117261] env[61649]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status()
[ 1460.117261] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 1460.117261] env[61649]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without"
[ 1460.117261] env[61649]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 1460.117261] env[61649]: ERROR oslo_vmware.rw_handles
[ 1460.117915] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Downloaded image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to vmware_temp/a133ce5a-54cd-4a92-afbe-dda577c086f1/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 1460.119776] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Caching image {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 1460.120057] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Copying Virtual Disk [datastore1] vmware_temp/a133ce5a-54cd-4a92-afbe-dda577c086f1/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk to [datastore1] vmware_temp/a133ce5a-54cd-4a92-afbe-dda577c086f1/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk {{(pid=61649) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 1460.120372] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d7e1774d-d154-436d-8a6a-ab022380fe03 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1460.128276] env[61649]: DEBUG oslo_vmware.api [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Waiting for the task: (returnval){
[ 1460.128276] env[61649]: value = "task-158242"
[ 1460.128276] env[61649]: _type = "Task"
[ 1460.128276] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1460.135984] env[61649]: DEBUG oslo_vmware.api [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Task: {'id': task-158242, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1460.638613] env[61649]: DEBUG oslo_vmware.exceptions [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Fault InvalidArgument not matched. {{(pid=61649) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 1460.639026] env[61649]: DEBUG oslo_concurrency.lockutils [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1460.639663] env[61649]: ERROR nova.compute.manager [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1460.639663] env[61649]: Faults: ['InvalidArgument']
[ 1460.639663] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Traceback (most recent call last):
[ 1460.639663] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources
[ 1460.639663] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] yield resources
[ 1460.639663] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1460.639663] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] self.driver.spawn(context, instance, image_meta,
[ 1460.639663] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1460.639663] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1460.639663] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1460.639663] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] self._fetch_image_if_missing(context, vi)
[ 1460.639663] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1460.640197] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] image_cache(vi, tmp_image_ds_loc)
[ 1460.640197] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1460.640197] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] vm_util.copy_virtual_disk(
[ 1460.640197] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1460.640197] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] session._wait_for_task(vmdk_copy_task)
[ 1460.640197] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1460.640197] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] return self.wait_for_task(task_ref)
[ 1460.640197] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1460.640197] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] return evt.wait()
[ 1460.640197] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1460.640197] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] result = hub.switch()
[ 1460.640197] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1460.640197] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] return self.greenlet.switch()
[ 1460.640592] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1460.640592] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] self.f(*self.args, **self.kw)
[ 1460.640592] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1460.640592] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] raise exceptions.translate_fault(task_info.error)
[ 1460.640592] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1460.640592] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Faults: ['InvalidArgument']
[ 1460.640592] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4]
[ 1460.640592] env[61649]: INFO nova.compute.manager [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Terminating instance
[ 1460.641906] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1460.642155] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1460.642425] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0598def4-952a-4c3d-a095-2b3b596228ce {{(pid=61649)
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.644511] env[61649]: DEBUG nova.compute.manager [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1460.644746] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1460.645473] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-231fc5f1-9a9c-434c-ac56-62bfb6701e39 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.652593] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Unregistering the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1460.652850] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8c249bf7-a48b-4f63-9981-c9f004e4fea5 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.654974] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1460.655157] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61649) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1460.656153] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c6017b5d-0760-4ca1-b7c5-397b9310d4f1 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.660547] env[61649]: DEBUG oslo_vmware.api [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Waiting for the task: (returnval){ [ 1460.660547] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]5282d094-ce4a-fc8e-67b2-18ac21033c44" [ 1460.660547] env[61649]: _type = "Task" [ 1460.660547] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1460.667295] env[61649]: DEBUG oslo_vmware.api [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]5282d094-ce4a-fc8e-67b2-18ac21033c44, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.717160] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Unregistered the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1460.717486] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Deleting contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1460.717719] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Deleting the datastore file [datastore1] 9cdd96c2-2837-4cb3-855c-ecad727dd5d4 {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1460.718018] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c1aa4a8b-9d8f-47b4-9268-60c43ce6a565 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.723367] env[61649]: DEBUG oslo_vmware.api [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Waiting for the task: (returnval){ [ 1460.723367] env[61649]: value = "task-158244" [ 1460.723367] env[61649]: _type = "Task" [ 1460.723367] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1460.731038] env[61649]: DEBUG oslo_vmware.api [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Task: {'id': task-158244, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.170518] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Preparing fetch location {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1461.170846] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Creating directory with path [datastore1] vmware_temp/4ecc94fc-331b-42fe-9667-989aaf5bedb4/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1461.171030] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-beb9d4b7-e05b-4427-a7cc-c8321d7a0349 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.182448] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Created directory with path [datastore1] vmware_temp/4ecc94fc-331b-42fe-9667-989aaf5bedb4/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1461.182791] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Fetch image to [datastore1] vmware_temp/4ecc94fc-331b-42fe-9667-989aaf5bedb4/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1461.183039] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to [datastore1] vmware_temp/4ecc94fc-331b-42fe-9667-989aaf5bedb4/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1461.183794] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-208457fd-f415-42e2-bb1e-148854bf8fd4 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.190171] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30a1c453-7285-4009-8c02-efe2aa3e90c9 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.198977] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f1f75f6-3a99-4587-acc7-69875be124b8 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.234029] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85f6a195-b6ab-46f1-bdc6-53ac1d7d95cc {{(pid=61649) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.240638] env[61649]: DEBUG oslo_vmware.api [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Task: {'id': task-158244, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.065749} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1461.242029] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Deleted the datastore file {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1461.242224] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Deleted contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1461.242398] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1461.242573] env[61649]: INFO nova.compute.manager [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Took 0.60 seconds to destroy the instance on the hypervisor. 
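Annotation: the task-158242/task-158244 waits above follow oslo.vmware's poll-until-done pattern — wait_for_task blocks while _poll_task re-reads the vSphere TaskInfo and logs "progress is N%" on each pass, and a task error is translated into a VimFaultException (cf. "Fault InvalidArgument not matched" from get_fault_class). A minimal sketch of that loop, assuming a hypothetical get_task_info callable that returns the TaskInfo as a dict; the real implementation lives in oslo_vmware/api.py and runs inside a looping call:

    import time

    class TaskFault(Exception):
        """Stand-in for oslo_vmware.exceptions.VimFaultException."""

    def wait_for_task(get_task_info, interval=0.5):
        # Poll TaskInfo until the task reaches a terminal state, mirroring
        # the repeated "progress is 0%" _poll_task lines in the log.
        while True:
            info = get_task_info()
            if info['state'] == 'success':
                return info.get('result')
            if info['state'] == 'error':
                # oslo.vmware translates the fault via exceptions.translate_fault;
                # when no class matches, it logs "Fault ... not matched" as above
                raise TaskFault(info['error'])
            time.sleep(interval)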
[ 1461.244398] env[61649]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-70b384ca-90fe-4d44-b640-2ed0e38061d4 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.246184] env[61649]: DEBUG nova.compute.claims [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Aborting claim: {{(pid=61649) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1461.246360] env[61649]: DEBUG oslo_concurrency.lockutils [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1461.246571] env[61649]: DEBUG oslo_concurrency.lockutils [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1461.248373] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Expecting reply to msg e3a059778cf6442c97026fa9106aa1ca in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1461.266551] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1461.278268] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e3a059778cf6442c97026fa9106aa1ca [ 1461.314425] env[61649]: DEBUG oslo_vmware.rw_handles [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4ecc94fc-331b-42fe-9667-989aaf5bedb4/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1461.372688] env[61649]: DEBUG oslo_vmware.rw_handles [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Completed reading data from the image iterator. 
{{(pid=61649) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1461.372878] env[61649]: DEBUG oslo_vmware.rw_handles [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4ecc94fc-331b-42fe-9667-989aaf5bedb4/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1461.488916] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2dc655d-0a22-4acc-a5dc-ec3a552818a3 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.496564] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a39e31ea-8371-4010-ae03-354f835819e6 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.526747] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65de5d9c-55ef-43bc-9a9f-9dcb46353727 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.533978] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39b52ea4-ca49-43e5-a35a-7cb5c2e4807b {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.546597] env[61649]: DEBUG nova.compute.provider_tree [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1461.547181] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Expecting reply to msg ea0fea73d4374dcc91416d4c554d668a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1461.555643] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ea0fea73d4374dcc91416d4c554d668a [ 1461.556575] env[61649]: DEBUG nova.scheduler.client.report [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1461.558957] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Expecting reply to msg 
ab405c941f3d420d8704543bc922fe70 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1461.569062] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ab405c941f3d420d8704543bc922fe70 [ 1461.569795] env[61649]: DEBUG oslo_concurrency.lockutils [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.323s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1461.570329] env[61649]: ERROR nova.compute.manager [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1461.570329] env[61649]: Faults: ['InvalidArgument'] [ 1461.570329] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Traceback (most recent call last): [ 1461.570329] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1461.570329] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] self.driver.spawn(context, instance, image_meta, [ 1461.570329] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1461.570329] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1461.570329] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1461.570329] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] self._fetch_image_if_missing(context, vi) [ 1461.570329] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1461.570329] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] image_cache(vi, tmp_image_ds_loc) [ 1461.570329] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1461.570734] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] vm_util.copy_virtual_disk( [ 1461.570734] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1461.570734] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] session._wait_for_task(vmdk_copy_task) [ 1461.570734] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1461.570734] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] return self.wait_for_task(task_ref) [ 1461.570734] 
env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1461.570734] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] return evt.wait() [ 1461.570734] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1461.570734] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] result = hub.switch() [ 1461.570734] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1461.570734] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] return self.greenlet.switch() [ 1461.570734] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1461.570734] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] self.f(*self.args, **self.kw) [ 1461.571099] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1461.571099] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] raise exceptions.translate_fault(task_info.error) [ 1461.571099] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1461.571099] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Faults: ['InvalidArgument'] [ 1461.571099] env[61649]: ERROR nova.compute.manager [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] [ 1461.571099] env[61649]: DEBUG nova.compute.utils [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] VimFaultException {{(pid=61649) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1461.572363] env[61649]: DEBUG nova.compute.manager [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Build of instance 9cdd96c2-2837-4cb3-855c-ecad727dd5d4 was re-scheduled: A specified parameter was not correct: fileType [ 1461.572363] env[61649]: Faults: ['InvalidArgument'] {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1461.572740] env[61649]: DEBUG nova.compute.manager [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Unplugging VIFs for instance {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1461.572919] env[61649]: DEBUG nova.compute.manager [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Virt driver does 
not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1461.573090] env[61649]: DEBUG nova.compute.manager [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1461.573257] env[61649]: DEBUG nova.network.neutron [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1461.860170] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Expecting reply to msg 9f4a0dc5ada940989f1c9f12147030d3 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1461.868214] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9f4a0dc5ada940989f1c9f12147030d3 [ 1461.868214] env[61649]: DEBUG nova.network.neutron [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1461.868444] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Expecting reply to msg 2437bf08a2f745efbad209c5bdec0718 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1461.878810] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2437bf08a2f745efbad209c5bdec0718 [ 1461.879422] env[61649]: INFO nova.compute.manager [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Took 0.31 seconds to deallocate network for instance. 
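Annotation: the sequence above (spawn fails with InvalidArgument, VIF unplugging is skipped because the VMware driver has no unplug_vifs, the network is deallocated, the build is re-scheduled) compresses to roughly this control flow. A sketch only, with hypothetical build/deallocate_network/reschedule callables standing in for Nova's actual methods and signatures:

    def do_build_and_run_instance(instance, build, deallocate_network, reschedule):
        # build() stands in for driver.spawn(); above it raised
        # VimFaultException("A specified parameter was not correct: fileType")
        try:
            build(instance)
        except Exception as exc:
            # mirrors "Deallocating network for instance"
            deallocate_network(instance)
            # mirrors "Build of instance ... was re-scheduled: <fault>"
            reschedule(instance, reason=str(exc))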
[ 1461.881109] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Expecting reply to msg 0127a2dfd42f4aa1beeced621a3cdf23 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1461.911932] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0127a2dfd42f4aa1beeced621a3cdf23 [ 1461.914629] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Expecting reply to msg 4790cbfd7a644d6c9a7b546e60b2ee53 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1461.945692] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4790cbfd7a644d6c9a7b546e60b2ee53 [ 1461.967470] env[61649]: INFO nova.scheduler.client.report [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Deleted allocations for instance 9cdd96c2-2837-4cb3-855c-ecad727dd5d4 [ 1461.974334] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Expecting reply to msg 29a5273670f14df08f99f28ccd84e81e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1461.987513] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 29a5273670f14df08f99f28ccd84e81e [ 1461.988085] env[61649]: DEBUG oslo_concurrency.lockutils [None req-65cb3a69-0426-4087-8675-c07ba8e0fb59 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Lock "9cdd96c2-2837-4cb3-855c-ecad727dd5d4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 591.964s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1461.988623] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Expecting reply to msg 6aeff8ebef744235895159f283b6bc14 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1461.989582] env[61649]: DEBUG oslo_concurrency.lockutils [None req-34060b93-1cea-441a-855e-cea394fedd21 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Lock "9cdd96c2-2837-4cb3-855c-ecad727dd5d4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 395.967s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1461.989582] env[61649]: DEBUG oslo_concurrency.lockutils [None req-34060b93-1cea-441a-855e-cea394fedd21 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Acquiring lock "9cdd96c2-2837-4cb3-855c-ecad727dd5d4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1461.989791] env[61649]: DEBUG oslo_concurrency.lockutils [None req-34060b93-1cea-441a-855e-cea394fedd21 tempest-ImagesNegativeTestJSON-2078991854 
tempest-ImagesNegativeTestJSON-2078991854-project-member] Lock "9cdd96c2-2837-4cb3-855c-ecad727dd5d4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1461.991016] env[61649]: DEBUG oslo_concurrency.lockutils [None req-34060b93-1cea-441a-855e-cea394fedd21 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Lock "9cdd96c2-2837-4cb3-855c-ecad727dd5d4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1461.992028] env[61649]: INFO nova.compute.manager [None req-34060b93-1cea-441a-855e-cea394fedd21 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Terminating instance [ 1461.993835] env[61649]: DEBUG nova.compute.manager [None req-34060b93-1cea-441a-855e-cea394fedd21 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1461.994037] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-34060b93-1cea-441a-855e-cea394fedd21 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1461.994497] env[61649]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6722c764-1931-4a22-864c-6784e010ede0 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.999522] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6aeff8ebef744235895159f283b6bc14 [ 1461.999920] env[61649]: DEBUG nova.compute.manager [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Starting instance... 
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1462.001524] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Expecting reply to msg 2e2270c5bb0742c79e9581e5cef2de7c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1462.006198] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b0fd3f2-560f-4fd5-a8bb-34baffc519e3 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.034197] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2e2270c5bb0742c79e9581e5cef2de7c [ 1462.034684] env[61649]: WARNING nova.virt.vmwareapi.vmops [None req-34060b93-1cea-441a-855e-cea394fedd21 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9cdd96c2-2837-4cb3-855c-ecad727dd5d4 could not be found. [ 1462.034961] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-34060b93-1cea-441a-855e-cea394fedd21 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1462.035159] env[61649]: INFO nova.compute.manager [None req-34060b93-1cea-441a-855e-cea394fedd21 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1462.035401] env[61649]: DEBUG oslo.service.loopingcall [None req-34060b93-1cea-441a-855e-cea394fedd21 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1462.037564] env[61649]: DEBUG nova.compute.manager [-] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1462.037692] env[61649]: DEBUG nova.network.neutron [-] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1462.051559] env[61649]: DEBUG oslo_concurrency.lockutils [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1462.051803] env[61649]: DEBUG oslo_concurrency.lockutils [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1462.053200] env[61649]: INFO nova.compute.claims [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1462.054702] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Expecting reply to msg 93142847f3d24085adc5ee59a46c0898 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1462.057518] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 061edb2117d24f9f9e8308ee09e26bc0 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1462.064088] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 061edb2117d24f9f9e8308ee09e26bc0 [ 1462.065184] env[61649]: DEBUG nova.network.neutron [-] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1462.065310] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg c75affe70d924001955e8b809413a82e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1462.083672] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c75affe70d924001955e8b809413a82e [ 1462.084228] env[61649]: INFO nova.compute.manager [-] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] Took 0.05 seconds to deallocate network for instance. 
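Annotation: the "compute_resources" acquire/release pairs throughout this section come from oslo.concurrency's lock wrapper, which emits the 'acquired ... waited Xs' / '"released" ... held Ys' DEBUG lines seen above. The decorator below is the real lockutils API; the function body is only an illustrative stand-in for the resource tracker's claim path:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def instance_claim(instance):
        # Runs with the in-process "compute_resources" semaphore held; the
        # wrapper logs acquisition wait time and hold time on entry/exit.
        pass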
[ 1462.087655] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-34060b93-1cea-441a-855e-cea394fedd21 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Expecting reply to msg 63976140cfb9405e92411e9317125bb6 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1462.090641] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 93142847f3d24085adc5ee59a46c0898 [ 1462.090641] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Expecting reply to msg 5361af14c97b42b8bcb93dbf3ff8f332 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1462.096060] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5361af14c97b42b8bcb93dbf3ff8f332 [ 1462.114890] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 63976140cfb9405e92411e9317125bb6 [ 1462.129248] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-34060b93-1cea-441a-855e-cea394fedd21 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Expecting reply to msg b0b2781a3e0144b7a98d0618e51c93a7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1462.163335] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b0b2781a3e0144b7a98d0618e51c93a7 [ 1462.165380] env[61649]: DEBUG oslo_concurrency.lockutils [None req-34060b93-1cea-441a-855e-cea394fedd21 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Lock "9cdd96c2-2837-4cb3-855c-ecad727dd5d4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.176s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1462.165688] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-34060b93-1cea-441a-855e-cea394fedd21 tempest-ImagesNegativeTestJSON-2078991854 tempest-ImagesNegativeTestJSON-2078991854-project-member] Expecting reply to msg 53f6c1e16372406c8f44a43a1028b0f7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1462.166436] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "9cdd96c2-2837-4cb3-855c-ecad727dd5d4" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 334.138s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1462.166614] env[61649]: INFO nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 9cdd96c2-2837-4cb3-855c-ecad727dd5d4] During sync_power_state the instance has a pending task (deleting). Skip. 
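Annotation: the "During sync_power_state the instance has a pending task (deleting). Skip." line reflects a guard in the periodic power-state sync — when an instance is mid-operation, the sync leaves it alone rather than race the in-flight delete. A minimal sketch of that guard (attribute and method names assumed, not Nova's exact code):

    def query_driver_power_state_and_sync(instance, driver_power_state):
        if instance.task_state is not None:
            # e.g. task_state == 'deleting' above; another code path owns
            # the instance, so the periodic sync skips it
            return
        if instance.power_state != driver_power_state:
            instance.power_state = driver_power_state
            instance.save()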
[ 1462.166776] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "9cdd96c2-2837-4cb3-855c-ecad727dd5d4" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1462.174723] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 53f6c1e16372406c8f44a43a1028b0f7 [ 1462.253448] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b64ea561-d3ae-4224-b5be-5ffa23370bdb {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.260627] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15bf4d33-816d-4d57-a844-a445f34a45b7 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.290518] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58b09d93-b6dc-4a2d-82e6-f87baa4ca09d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.297371] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0438fc9-b414-4662-bf39-bf8d27a9e333 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.310043] env[61649]: DEBUG nova.compute.provider_tree [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1462.310532] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Expecting reply to msg d3c35729c24b4dc986d40e913e11d594 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1462.318190] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d3c35729c24b4dc986d40e913e11d594 [ 1462.319018] env[61649]: DEBUG nova.scheduler.client.report [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1462.321267] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Expecting reply to msg 58b8f53c20b143dbbb8ff1c7656ae773 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1462.334852] 
env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 58b8f53c20b143dbbb8ff1c7656ae773 [ 1462.335571] env[61649]: DEBUG oslo_concurrency.lockutils [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.284s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1462.336052] env[61649]: DEBUG nova.compute.manager [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Start building networks asynchronously for instance. {{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1462.337651] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Expecting reply to msg b0b7514da66142e0ad8daf011104fea0 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1462.367791] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b0b7514da66142e0ad8daf011104fea0 [ 1462.369454] env[61649]: DEBUG nova.compute.utils [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Using /dev/sd instead of None {{(pid=61649) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1462.370038] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Expecting reply to msg d65b3306b2464a19b390ee47ddd86c08 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1462.370984] env[61649]: DEBUG nova.compute.manager [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Allocating IP information in the background. {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1462.371153] env[61649]: DEBUG nova.network.neutron [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] allocate_for_instance() {{(pid=61649) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1462.378818] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d65b3306b2464a19b390ee47ddd86c08 [ 1462.379304] env[61649]: DEBUG nova.compute.manager [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Start building block device mappings for instance. 
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1462.380908] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Expecting reply to msg af33b5a7ed4347d48a45e0d8cb26c940 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1462.408188] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg af33b5a7ed4347d48a45e0d8cb26c940 [ 1462.410231] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Expecting reply to msg 0d82c66526b749f7a7b9c8b8e7ce8e19 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1462.444072] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0d82c66526b749f7a7b9c8b8e7ce8e19 [ 1462.444072] env[61649]: DEBUG nova.compute.manager [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Start spawning the instance on the hypervisor. {{(pid=61649) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1462.450369] env[61649]: DEBUG nova.policy [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a6cdf21e34794c8e992f03fa012a44a4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4da48d087c8349b4b0e48c5e4b329962', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61649) authorize /opt/stack/nova/nova/policy.py:203}} [ 1462.463572] env[61649]: DEBUG nova.virt.hardware [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-04-02T10:03:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-04-02T10:03:42Z,direct_url=,disk_format='vmdk',id=d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d9c1bd4c77004c3cb8e42232cad1896c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-04-02T10:03:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1462.463815] env[61649]: DEBUG nova.virt.hardware [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Flavor limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1462.463975] env[61649]: DEBUG 
nova.virt.hardware [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Image limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1462.464173] env[61649]: DEBUG nova.virt.hardware [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Flavor pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1462.464321] env[61649]: DEBUG nova.virt.hardware [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Image pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1462.464469] env[61649]: DEBUG nova.virt.hardware [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1462.464671] env[61649]: DEBUG nova.virt.hardware [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1462.464831] env[61649]: DEBUG nova.virt.hardware [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1462.464993] env[61649]: DEBUG nova.virt.hardware [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Got 1 possible topologies {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1462.465154] env[61649]: DEBUG nova.virt.hardware [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1462.465326] env[61649]: DEBUG nova.virt.hardware [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1462.466243] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e98ef186-a10d-4d2e-99f8-8331076a793f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.473972] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21533edf-592b-40be-9400-fc839f21a929 {{(pid=61649) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.995931] env[61649]: DEBUG nova.network.neutron [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Successfully created port: 536c43cb-9e12-4ec7-984e-39371e46f499 {{(pid=61649) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1463.515875] env[61649]: DEBUG nova.network.neutron [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Successfully updated port: 536c43cb-9e12-4ec7-984e-39371e46f499 {{(pid=61649) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1463.516451] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Expecting reply to msg baf7d2a142af44bab6b29f15ad732a6f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1463.523197] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg baf7d2a142af44bab6b29f15ad732a6f [ 1463.523853] env[61649]: DEBUG oslo_concurrency.lockutils [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Acquiring lock "refresh_cache-ff225293-ad72-499a-9b5b-147d0bc40350" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1463.523993] env[61649]: DEBUG oslo_concurrency.lockutils [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Acquired lock "refresh_cache-ff225293-ad72-499a-9b5b-147d0bc40350" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1463.524156] env[61649]: DEBUG nova.network.neutron [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Building network info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1463.524539] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Expecting reply to msg 1621798131b84724b9c2c191685fa235 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1463.531199] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1621798131b84724b9c2c191685fa235 [ 1463.568352] env[61649]: DEBUG nova.network.neutron [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Instance cache missing network info. 
{{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1463.713959] env[61649]: DEBUG nova.network.neutron [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Updating instance_info_cache with network_info: [{"id": "536c43cb-9e12-4ec7-984e-39371e46f499", "address": "fa:16:3e:0a:a1:2d", "network": {"id": "b96f19dc-5248-4d43-8f39-ea3131aaf6db", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.39", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d9c1bd4c77004c3cb8e42232cad1896c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a8c8175-1197-4f12-baac-ef6aba95f585", "external-id": "nsx-vlan-transportzone-832", "segmentation_id": 832, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap536c43cb-9e", "ovs_interfaceid": "536c43cb-9e12-4ec7-984e-39371e46f499", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1463.714465] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Expecting reply to msg 4968b870be5948aa91a22c20e6e5cd3e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1463.726480] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4968b870be5948aa91a22c20e6e5cd3e [ 1463.727077] env[61649]: DEBUG oslo_concurrency.lockutils [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Releasing lock "refresh_cache-ff225293-ad72-499a-9b5b-147d0bc40350" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1463.727363] env[61649]: DEBUG nova.compute.manager [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Instance network_info: |[{"id": "536c43cb-9e12-4ec7-984e-39371e46f499", "address": "fa:16:3e:0a:a1:2d", "network": {"id": "b96f19dc-5248-4d43-8f39-ea3131aaf6db", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.39", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d9c1bd4c77004c3cb8e42232cad1896c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a8c8175-1197-4f12-baac-ef6aba95f585", "external-id": 
"nsx-vlan-transportzone-832", "segmentation_id": 832, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap536c43cb-9e", "ovs_interfaceid": "536c43cb-9e12-4ec7-984e-39371e46f499", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1463.727758] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0a:a1:2d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1a8c8175-1197-4f12-baac-ef6aba95f585', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '536c43cb-9e12-4ec7-984e-39371e46f499', 'vif_model': 'vmxnet3'}] {{(pid=61649) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1463.735038] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Creating folder: Project (4da48d087c8349b4b0e48c5e4b329962). Parent ref: group-v51588. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1463.735542] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-566a167f-3aea-4942-9f51-5c95de5db979 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.749468] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Created folder: Project (4da48d087c8349b4b0e48c5e4b329962) in parent group-v51588. [ 1463.749659] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Creating folder: Instances. Parent ref: group-v51680. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1463.749880] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8af09af1-b41a-4de0-a03f-077f55b8ee21 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.759358] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Created folder: Instances in parent group-v51680. [ 1463.759605] env[61649]: DEBUG oslo.service.loopingcall [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1463.759823] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Creating VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1463.760033] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-24a28aa8-c24c-4f82-84dc-eadba006f8bd {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.780084] env[61649]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1463.780084] env[61649]: value = "task-158247" [ 1463.780084] env[61649]: _type = "Task" [ 1463.780084] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1463.787559] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158247, 'name': CreateVM_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.916900] env[61649]: DEBUG nova.compute.manager [req-72f3c5a1-2738-4019-950a-f4583530e84a req-c3357b00-0aba-4fee-a2e3-5f29ead3eb74 service nova] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Received event network-vif-plugged-536c43cb-9e12-4ec7-984e-39371e46f499 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1463.917191] env[61649]: DEBUG oslo_concurrency.lockutils [req-72f3c5a1-2738-4019-950a-f4583530e84a req-c3357b00-0aba-4fee-a2e3-5f29ead3eb74 service nova] Acquiring lock "ff225293-ad72-499a-9b5b-147d0bc40350-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1463.917426] env[61649]: DEBUG oslo_concurrency.lockutils [req-72f3c5a1-2738-4019-950a-f4583530e84a req-c3357b00-0aba-4fee-a2e3-5f29ead3eb74 service nova] Lock "ff225293-ad72-499a-9b5b-147d0bc40350-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1463.917595] env[61649]: DEBUG oslo_concurrency.lockutils [req-72f3c5a1-2738-4019-950a-f4583530e84a req-c3357b00-0aba-4fee-a2e3-5f29ead3eb74 service nova] Lock "ff225293-ad72-499a-9b5b-147d0bc40350-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1463.918058] env[61649]: DEBUG nova.compute.manager [req-72f3c5a1-2738-4019-950a-f4583530e84a req-c3357b00-0aba-4fee-a2e3-5f29ead3eb74 service nova] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] No waiting events found dispatching network-vif-plugged-536c43cb-9e12-4ec7-984e-39371e46f499 {{(pid=61649) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1463.918317] env[61649]: WARNING nova.compute.manager [req-72f3c5a1-2738-4019-950a-f4583530e84a req-c3357b00-0aba-4fee-a2e3-5f29ead3eb74 service nova] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Received unexpected event network-vif-plugged-536c43cb-9e12-4ec7-984e-39371e46f499 for instance with vm_state building and task_state spawning. 
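[editor's note] The records above show the driver creating the VM via Folder.CreateVM_Task and then polling the task ("progress is 0%" ... completed) while an external network-vif-plugged event arrives in parallel. The sketch below is a minimal, generic poll-until-done loop in the spirit of the wait_for_task behaviour visible in this log; `get_task_info` is a hypothetical stand-in for a vSphere task-info read and is not the real oslo.vmware API.

```python
import time

class TaskFailed(Exception):
    """Raised when the polled task reports an error state."""

def wait_for_task(get_task_info, interval=0.5, timeout=300):
    # Illustrative only: poll a task-info callable until it reports
    # success or error, mirroring the repeated "progress is N%" records
    # in the log above. `get_task_info` is assumed to return a dict
    # like {'state': 'running', 'progress': 42}.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info['state'] == 'success':
            return info.get('result')
        if info['state'] == 'error':
            # The real code translates the VIM fault into a Python
            # exception (see the InvalidArgument traceback later in
            # this log).
            raise TaskFailed(info.get('error'))
        time.sleep(interval)  # one log record is emitted per poll
    raise TimeoutError('task did not complete in time')
```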
[ 1463.918491] env[61649]: DEBUG nova.compute.manager [req-72f3c5a1-2738-4019-950a-f4583530e84a req-c3357b00-0aba-4fee-a2e3-5f29ead3eb74 service nova] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Received event network-changed-536c43cb-9e12-4ec7-984e-39371e46f499 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1463.918652] env[61649]: DEBUG nova.compute.manager [req-72f3c5a1-2738-4019-950a-f4583530e84a req-c3357b00-0aba-4fee-a2e3-5f29ead3eb74 service nova] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Refreshing instance network info cache due to event network-changed-536c43cb-9e12-4ec7-984e-39371e46f499. {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 1463.918839] env[61649]: DEBUG oslo_concurrency.lockutils [req-72f3c5a1-2738-4019-950a-f4583530e84a req-c3357b00-0aba-4fee-a2e3-5f29ead3eb74 service nova] Acquiring lock "refresh_cache-ff225293-ad72-499a-9b5b-147d0bc40350" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1463.918974] env[61649]: DEBUG oslo_concurrency.lockutils [req-72f3c5a1-2738-4019-950a-f4583530e84a req-c3357b00-0aba-4fee-a2e3-5f29ead3eb74 service nova] Acquired lock "refresh_cache-ff225293-ad72-499a-9b5b-147d0bc40350" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1463.919138] env[61649]: DEBUG nova.network.neutron [req-72f3c5a1-2738-4019-950a-f4583530e84a req-c3357b00-0aba-4fee-a2e3-5f29ead3eb74 service nova] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Refreshing network info cache for port 536c43cb-9e12-4ec7-984e-39371e46f499 {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 1463.919815] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-72f3c5a1-2738-4019-950a-f4583530e84a req-c3357b00-0aba-4fee-a2e3-5f29ead3eb74 service nova] Expecting reply to msg 3f1e15cc4f4f4fadb9105461fe2971a9 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1463.927260] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3f1e15cc4f4f4fadb9105461fe2971a9 [ 1464.205763] env[61649]: DEBUG nova.network.neutron [req-72f3c5a1-2738-4019-950a-f4583530e84a req-c3357b00-0aba-4fee-a2e3-5f29ead3eb74 service nova] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Updated VIF entry in instance network info cache for port 536c43cb-9e12-4ec7-984e-39371e46f499. 
{{(pid=61649) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 1464.206129] env[61649]: DEBUG nova.network.neutron [req-72f3c5a1-2738-4019-950a-f4583530e84a req-c3357b00-0aba-4fee-a2e3-5f29ead3eb74 service nova] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Updating instance_info_cache with network_info: [{"id": "536c43cb-9e12-4ec7-984e-39371e46f499", "address": "fa:16:3e:0a:a1:2d", "network": {"id": "b96f19dc-5248-4d43-8f39-ea3131aaf6db", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.39", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d9c1bd4c77004c3cb8e42232cad1896c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a8c8175-1197-4f12-baac-ef6aba95f585", "external-id": "nsx-vlan-transportzone-832", "segmentation_id": 832, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap536c43cb-9e", "ovs_interfaceid": "536c43cb-9e12-4ec7-984e-39371e46f499", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1464.206650] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-72f3c5a1-2738-4019-950a-f4583530e84a req-c3357b00-0aba-4fee-a2e3-5f29ead3eb74 service nova] Expecting reply to msg 8ae16b5fcbd4447db9bc9a22510e5d22 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1464.214843] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8ae16b5fcbd4447db9bc9a22510e5d22 [ 1464.215404] env[61649]: DEBUG oslo_concurrency.lockutils [req-72f3c5a1-2738-4019-950a-f4583530e84a req-c3357b00-0aba-4fee-a2e3-5f29ead3eb74 service nova] Releasing lock "refresh_cache-ff225293-ad72-499a-9b5b-147d0bc40350" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1464.289657] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158247, 'name': CreateVM_Task, 'duration_secs': 0.296588} completed successfully. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.289777] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Created VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1464.290385] env[61649]: DEBUG oslo_concurrency.lockutils [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1464.290566] env[61649]: DEBUG oslo_concurrency.lockutils [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1464.290877] env[61649]: DEBUG oslo_concurrency.lockutils [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1464.291109] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ed654e2-5350-4488-8a04-948c3845062a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.295236] env[61649]: DEBUG oslo_vmware.api [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Waiting for the task: (returnval){ [ 1464.295236] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]5294f45c-aa1f-f2e2-49db-1ff4d36beec3" [ 1464.295236] env[61649]: _type = "Task" [ 1464.295236] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1464.302303] env[61649]: DEBUG oslo_vmware.api [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]5294f45c-aa1f-f2e2-49db-1ff4d36beec3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.804735] env[61649]: DEBUG oslo_concurrency.lockutils [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1464.805006] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Processing image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1464.805212] env[61649]: DEBUG oslo_concurrency.lockutils [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1466.596291] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-51aaa883-591f-435a-9c24-7ccec09215c8 tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Expecting reply to msg c1b39d486a114a03938acc696335fea9 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1466.605065] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c1b39d486a114a03938acc696335fea9 [ 1466.605525] env[61649]: DEBUG oslo_concurrency.lockutils [None req-51aaa883-591f-435a-9c24-7ccec09215c8 tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Acquiring lock "ff225293-ad72-499a-9b5b-147d0bc40350" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1486.767209] env[61649]: DEBUG oslo_concurrency.lockutils [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Acquiring lock "545947a4-3f1a-44fe-ac02-ec5e2e5844d5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1486.767555] env[61649]: DEBUG oslo_concurrency.lockutils [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Lock "545947a4-3f1a-44fe-ac02-ec5e2e5844d5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1490.982432] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 61e2bcb183df46f7aed6ff5f4f70d3b0 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1490.992190] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 61e2bcb183df46f7aed6ff5f4f70d3b0 [ 
1491.940961] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1493.928909] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1493.928909] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61649) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 1494.923956] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1494.928405] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1495.929428] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1496.932033] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1496.932033] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Starting heal instance info cache {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 1496.932033] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Rebuilding the list of instances to heal {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 1496.932033] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 4bf5b13a2dff429ab0ed7a15a9ee2b5b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1496.949105] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4bf5b13a2dff429ab0ed7a15a9ee2b5b [ 1496.951324] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1496.951473] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Skipping network cache update for instance because it is Building. 
{{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1496.951604] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1496.951728] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1496.951848] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1496.951970] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1496.952108] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1496.952228] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: b6243867-9546-4663-9d48-5c040537490b] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1496.952345] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1496.952471] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1496.952614] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Didn't find any instances for network info cache update. 
{{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 1497.929243] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1499.924618] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1499.925287] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 0d028fdeba094881b6369e2e8a7b6978 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1499.942379] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0d028fdeba094881b6369e2e8a7b6978 [ 1499.945848] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1499.946151] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg cb036f0fb0bc43d388e0fe56aadd92f5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1499.954099] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cb036f0fb0bc43d388e0fe56aadd92f5 [ 1499.954924] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1499.955199] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1499.955370] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1499.955525] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61649) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1499.956550] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5d15f90-e70d-413c-9afd-64554acd68c6 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.965137] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f8fe74f-d36d-4bec-aa64-ab9ebed74106 {{(pid=61649) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.978849] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01a07879-acd0-4736-a3ac-59659a65d051 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.984872] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4459ac5-e08a-47ba-b41c-e7cf1622112a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.012699] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181753MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61649) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1500.012852] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1500.013045] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1500.013828] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 3eeb87596e2a44138424360eccc95e16 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1500.047955] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3eeb87596e2a44138424360eccc95e16 [ 1500.051580] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 22f06fe812594a7185af8196722b3648 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1500.060031] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 22f06fe812594a7185af8196722b3648 [ 1500.109531] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 5dc4bde6-db61-47c2-a2b8-d2a5515b1525 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1500.109692] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 5730229a-fd0c-4df1-9059-cd6ed39e954c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1500.109860] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 0b0050ff-2714-4068-9956-089c6aa3eff1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1500.109931] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 0fb0aaae-b6d2-418d-81a9-74671f4b97c6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1500.110095] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 5f67180f-6b27-4487-8858-5f57fcffd041 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1500.110228] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance aa39503a-2342-421e-928f-35ec7c8e47fb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1500.110316] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance d6e8f17f-40c4-46e0-a900-d92d1da01ed8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1500.110406] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance b6243867-9546-4663-9d48-5c040537490b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1500.110487] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 4b87e74a-2408-466f-b1c2-68330c31fb9d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1500.110598] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance ff225293-ad72-499a-9b5b-147d0bc40350 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1500.111230] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 01e9ce01d2c04ac6a395adde731f9292 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1500.126068] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 01e9ce01d2c04ac6a395adde731f9292 [ 1500.126857] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 5f424618-f9b3-4e9a-898c-2d1a07476cc7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1500.127453] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg f2201038b90e4d709d48e2e8c5cbf196 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1500.136801] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f2201038b90e4d709d48e2e8c5cbf196 [ 1500.137476] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 4661732c-51dc-4a77-aa32-28049dbd5ad7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1500.138023] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg ed761802632a462d9356e53daff5cd92 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1500.147307] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ed761802632a462d9356e53daff5cd92 [ 1500.147962] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 545947a4-3f1a-44fe-ac02-ec5e2e5844d5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1500.148289] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1500.148445] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1500.164786] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Refreshing inventories for resource provider dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 1500.177382] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Updating ProviderTree inventory for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 1500.177568] env[61649]: DEBUG nova.compute.provider_tree [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Updating inventory in ProviderTree for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1500.188383] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Refreshing aggregate associations for resource provider dad32f24-3843-462d-a3f9-4ef2a60037c4, aggregates: None {{(pid=61649) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 1500.203574] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Refreshing trait associations for resource provider dad32f24-3843-462d-a3f9-4ef2a60037c4, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=61649) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 1500.341902] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8ac280e-7a54-49a9-9eb1-b1fe0e080b43 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.348897] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-0a309fc1-9db5-471b-98f0-7347d9883cb6 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.377071] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c9f6233-e3c4-4887-a90d-7e25e2425b53 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.383297] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eab9f709-4cb6-4e13-8294-672dd88ac1d9 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.396469] env[61649]: DEBUG nova.compute.provider_tree [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1500.396469] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg ccda53df687c4ffe9d04005a63a8a9c7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1500.404300] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ccda53df687c4ffe9d04005a63a8a9c7 [ 1500.405028] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1500.407275] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 84defda0f1604f658a2be1de396085f7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1500.419773] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 84defda0f1604f658a2be1de396085f7 [ 1500.420405] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61649) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1500.420644] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.408s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1501.403713] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1506.452774] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 
tempest-AttachInterfacesTestJSON-1871132450-project-member] Acquiring lock "db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1506.453083] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Lock "db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1510.137044] env[61649]: WARNING oslo_vmware.rw_handles [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1510.137044] env[61649]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1510.137044] env[61649]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1510.137044] env[61649]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1510.137044] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1510.137044] env[61649]: ERROR oslo_vmware.rw_handles response.begin() [ 1510.137044] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1510.137044] env[61649]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1510.137044] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1510.137044] env[61649]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1510.137044] env[61649]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1510.137044] env[61649]: ERROR oslo_vmware.rw_handles [ 1510.137728] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Downloaded image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to vmware_temp/4ecc94fc-331b-42fe-9667-989aaf5bedb4/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1510.139492] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Caching image {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1510.139786] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Copying Virtual Disk [datastore1] vmware_temp/4ecc94fc-331b-42fe-9667-989aaf5bedb4/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk to [datastore1] 
vmware_temp/4ecc94fc-331b-42fe-9667-989aaf5bedb4/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk {{(pid=61649) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1510.141777] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9712aac4-2a3f-4272-95ad-510c74ef96bb {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.148642] env[61649]: DEBUG oslo_vmware.api [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Waiting for the task: (returnval){ [ 1510.148642] env[61649]: value = "task-158248" [ 1510.148642] env[61649]: _type = "Task" [ 1510.148642] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1510.156605] env[61649]: DEBUG oslo_vmware.api [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Task: {'id': task-158248, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.658834] env[61649]: DEBUG oslo_vmware.exceptions [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Fault InvalidArgument not matched. {{(pid=61649) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1510.659081] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1510.659669] env[61649]: ERROR nova.compute.manager [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1510.659669] env[61649]: Faults: ['InvalidArgument'] [ 1510.659669] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Traceback (most recent call last): [ 1510.659669] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1510.659669] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] yield resources [ 1510.659669] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1510.659669] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] self.driver.spawn(context, instance, image_meta, [ 1510.659669] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1510.659669] env[61649]: ERROR nova.compute.manager [instance: 
5dc4bde6-db61-47c2-a2b8-d2a5515b1525] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1510.659669] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1510.659669] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] self._fetch_image_if_missing(context, vi) [ 1510.659669] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1510.660061] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] image_cache(vi, tmp_image_ds_loc) [ 1510.660061] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1510.660061] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] vm_util.copy_virtual_disk( [ 1510.660061] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1510.660061] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] session._wait_for_task(vmdk_copy_task) [ 1510.660061] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1510.660061] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] return self.wait_for_task(task_ref) [ 1510.660061] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1510.660061] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] return evt.wait() [ 1510.660061] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1510.660061] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] result = hub.switch() [ 1510.660061] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1510.660061] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] return self.greenlet.switch() [ 1510.660524] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1510.660524] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] self.f(*self.args, **self.kw) [ 1510.660524] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1510.660524] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] raise exceptions.translate_fault(task_info.error) [ 1510.660524] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] 
oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1510.660524] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Faults: ['InvalidArgument'] [ 1510.660524] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] [ 1510.660524] env[61649]: INFO nova.compute.manager [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Terminating instance [ 1510.661600] env[61649]: DEBUG oslo_concurrency.lockutils [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1510.661819] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1510.662051] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-192f3a5c-e77e-4d60-ae0d-32759d7a69f6 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.664209] env[61649]: DEBUG nova.compute.manager [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Start destroying the instance on the hypervisor. 
{{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1510.664403] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1510.665192] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05e23562-70f6-4773-b085-ba440bd31b89 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.671860] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Unregistering the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1510.672086] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-db5b0a99-2976-421f-bb86-af618b98b8aa {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.674248] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1510.674431] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61649) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1510.675467] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b6cb7c2-3f3d-4e90-9150-69e82e705272 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.680423] env[61649]: DEBUG oslo_vmware.api [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Waiting for the task: (returnval){ [ 1510.680423] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]5223158c-9963-724c-9da1-437c4f41c55e" [ 1510.680423] env[61649]: _type = "Task" [ 1510.680423] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1510.687476] env[61649]: DEBUG oslo_vmware.api [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]5223158c-9963-724c-9da1-437c4f41c55e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.190255] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Preparing fetch location {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1511.190546] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Creating directory with path [datastore1] vmware_temp/e5fb274a-b4b6-4733-9432-aca43ba5fbc7/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1511.190776] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6a48e3ed-a0f2-40d7-a259-a62ac4b9cb84 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.211863] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Created directory with path [datastore1] vmware_temp/e5fb274a-b4b6-4733-9432-aca43ba5fbc7/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1511.211863] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Fetch image to [datastore1] vmware_temp/e5fb274a-b4b6-4733-9432-aca43ba5fbc7/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1511.211863] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to [datastore1] vmware_temp/e5fb274a-b4b6-4733-9432-aca43ba5fbc7/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1511.212238] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d263b71-c355-453f-b7d3-2e3ee5dde008 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.218737] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6be5b0aa-9606-4ec3-9211-99ea2c39c0a8 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.227260] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-953851a6-1bc2-4464-8c3f-3103ba2006a0 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.256543] env[61649]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3743d1d-01a0-42b8-b13f-afd900f4eceb {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.261683] env[61649]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-6b192d41-0b22-44a5-98d8-20abc8c1fffe {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.281737] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1511.325348] env[61649]: DEBUG oslo_vmware.rw_handles [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e5fb274a-b4b6-4733-9432-aca43ba5fbc7/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1511.385205] env[61649]: DEBUG oslo_vmware.rw_handles [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Completed reading data from the image iterator. {{(pid=61649) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1511.385404] env[61649]: DEBUG oslo_vmware.rw_handles [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e5fb274a-b4b6-4733-9432-aca43ba5fbc7/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61649) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1514.390351] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Unregistered the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1514.390717] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Deleting contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1514.390779] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Deleting the datastore file [datastore1] 5dc4bde6-db61-47c2-a2b8-d2a5515b1525 {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1514.391041] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-727f91dd-bf6b-4508-993b-0d44ea1c2d83 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.398911] env[61649]: DEBUG oslo_vmware.api [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Waiting for the task: (returnval){ [ 1514.398911] env[61649]: value = "task-158250" [ 1514.398911] env[61649]: _type = "Task" [ 1514.398911] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.405828] env[61649]: DEBUG oslo_vmware.api [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Task: {'id': task-158250, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.908583] env[61649]: DEBUG oslo_vmware.api [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Task: {'id': task-158250, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078438} completed successfully. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1514.908915] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Deleted the datastore file {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1514.909141] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Deleted contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1514.909352] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1514.909688] env[61649]: INFO nova.compute.manager [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Took 4.25 seconds to destroy the instance on the hypervisor. [ 1514.911834] env[61649]: DEBUG nova.compute.claims [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Aborting claim: {{(pid=61649) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1514.912060] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1514.912319] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1514.914135] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg a67ae1421e2442f1bb754eacaea949b2 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1514.945747] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a67ae1421e2442f1bb754eacaea949b2 [ 1515.112356] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b656a99-3f5f-4ab5-a4f9-2ad9ebb43a91 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.119449] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-008debf0-f18c-4574-b5a3-8121f015b044 {{(pid=61649) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.150409] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f93e76ce-3a1e-478f-bb5b-cbd95930633c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.157821] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1972a0a5-6a8c-4cc9-8094-db9f173fd546 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.170436] env[61649]: DEBUG nova.compute.provider_tree [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1515.170921] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg d1e250dc624942c8a1f742837aceb9c5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1515.178555] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d1e250dc624942c8a1f742837aceb9c5 [ 1515.179444] env[61649]: DEBUG nova.scheduler.client.report [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1515.181884] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 4f2f76d7fd6b4d9b9cf867efc2aa46a1 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1515.192679] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4f2f76d7fd6b4d9b9cf867efc2aa46a1 [ 1515.193375] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.281s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1515.193881] env[61649]: ERROR nova.compute.manager [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1515.193881] env[61649]: Faults: ['InvalidArgument'] [ 1515.193881] env[61649]: ERROR 
nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Traceback (most recent call last): [ 1515.193881] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1515.193881] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] self.driver.spawn(context, instance, image_meta, [ 1515.193881] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1515.193881] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1515.193881] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1515.193881] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] self._fetch_image_if_missing(context, vi) [ 1515.193881] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1515.193881] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] image_cache(vi, tmp_image_ds_loc) [ 1515.193881] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1515.194185] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] vm_util.copy_virtual_disk( [ 1515.194185] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1515.194185] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] session._wait_for_task(vmdk_copy_task) [ 1515.194185] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1515.194185] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] return self.wait_for_task(task_ref) [ 1515.194185] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1515.194185] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] return evt.wait() [ 1515.194185] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1515.194185] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] result = hub.switch() [ 1515.194185] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1515.194185] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] return self.greenlet.switch() [ 1515.194185] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1515.194185] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] self.f(*self.args, **self.kw) [ 1515.194532] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1515.194532] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] raise exceptions.translate_fault(task_info.error) [ 1515.194532] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1515.194532] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Faults: ['InvalidArgument'] [ 1515.194532] env[61649]: ERROR nova.compute.manager [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] [ 1515.194669] env[61649]: DEBUG nova.compute.utils [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] VimFaultException {{(pid=61649) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1515.195936] env[61649]: DEBUG nova.compute.manager [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Build of instance 5dc4bde6-db61-47c2-a2b8-d2a5515b1525 was re-scheduled: A specified parameter was not correct: fileType [ 1515.195936] env[61649]: Faults: ['InvalidArgument'] {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1515.196332] env[61649]: DEBUG nova.compute.manager [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Unplugging VIFs for instance {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1515.196505] env[61649]: DEBUG nova.compute.manager [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1515.196675] env[61649]: DEBUG nova.compute.manager [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1515.196836] env[61649]: DEBUG nova.network.neutron [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1515.432616] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 970c9de1ac5f421aa0cad2297103084c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1515.442582] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 970c9de1ac5f421aa0cad2297103084c [ 1515.443145] env[61649]: DEBUG nova.network.neutron [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1515.443636] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg f53d535d1cbd41cbabd429fd940b6aef in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1515.452366] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f53d535d1cbd41cbabd429fd940b6aef [ 1515.453102] env[61649]: INFO nova.compute.manager [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Took 0.26 seconds to deallocate network for instance. 
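The two tracebacks above (at 1510.659669 and 1515.193881) both bottom out in oslo_vmware's task polling: wait_for_task() parks the calling greenthread while a looping call polls the vCenter task, and when the task reaches an error state the VIM fault is translated and raised, here as VimFaultException with Faults: ['InvalidArgument']. Below is a minimal sketch of that poll-and-translate pattern; get_task_info() and VimFaultError are illustrative stand-ins, not real oslo.vmware names.

import time

class VimFaultError(Exception):
    # Stand-in for oslo_vmware.exceptions.VimFaultException.
    def __init__(self, message, faults):
        super().__init__(message)
        self.faults = faults  # e.g. ['InvalidArgument']

def wait_for_task(get_task_info, poll_interval=0.5):
    # Poll until the task leaves its running/queued states; the repeated
    # "progress is 0%" DEBUG lines above come from exactly this loop.
    while True:
        info = get_task_info()
        if info["state"] == "success":
            return info.get("result")
        if info["state"] == "error":
            # Mirrors "raise exceptions.translate_fault(task_info.error)"
            # in the traceback: the VIM fault becomes a Python exception
            # that propagates up through driver.spawn() into the manager.
            raise VimFaultError(info["message"], info["faults"])
        time.sleep(poll_interval)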
[ 1515.455509] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 9ccf381e05eb4dae9a697161d6daba59 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1515.486392] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9ccf381e05eb4dae9a697161d6daba59 [ 1515.490139] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 4e4f406770be44a9ae8b1852ff23e32b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1515.525280] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4e4f406770be44a9ae8b1852ff23e32b [ 1515.543867] env[61649]: INFO nova.scheduler.client.report [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Deleted allocations for instance 5dc4bde6-db61-47c2-a2b8-d2a5515b1525 [ 1515.549739] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 5ed85fb36672457e910563764efe8239 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1515.559396] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ed85fb36672457e910563764efe8239 [ 1515.560104] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8f4f82d5-95bb-414b-b661-6ed9fc742168 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "5dc4bde6-db61-47c2-a2b8-d2a5515b1525" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 544.663s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1515.560807] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-a2e37de6-c0c6-4a75-b735-63fb2f75786f tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Expecting reply to msg 6892b77b07e741b9833d938b0172be2d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1515.561623] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "5dc4bde6-db61-47c2-a2b8-d2a5515b1525" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 387.533s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1515.561946] env[61649]: INFO nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] During sync_power_state the instance has a pending task (spawning). Skip.
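The lock bookkeeping above also explains the long waits: the build held the per-instance lock "5dc4bde6-db61-47c2-a2b8-d2a5515b1525" for 544.663s, so the power-state sync and the terminate request queued behind it for 387.533s and 348.419s respectively. The Acquiring/acquired/released lines are emitted by oslo.concurrency's lockutils wrapper (lockutils.py:402/407/421 in the paths above). A minimal sketch of that pattern follows, assuming oslo.concurrency is installed; the lock names and function bodies are illustrative only, not Nova's actual code.

from oslo_concurrency import lockutils

# Serialize all work on one instance behind a named in-process lock,
# as build_and_run_instance does via its _locked_do_... inner function.
@lockutils.synchronized('5dc4bde6-db61-47c2-a2b8-d2a5515b1525')
def locked_do_build_and_run_instance():
    pass  # long-running spawn work; other callers queue up, as logged above

# The same helper works as a context manager, e.g. for the resource
# tracker's "compute_resources" critical sections seen in the log.
with lockutils.lock('compute_resources'):
    pass  # instance_claim / abort_instance_claim run under this lock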
[ 1515.562250] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "5dc4bde6-db61-47c2-a2b8-d2a5515b1525" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1515.562952] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8b52a979-a47d-4c51-bc4b-18d81f2a9f6a tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "5dc4bde6-db61-47c2-a2b8-d2a5515b1525" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 348.419s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1515.563344] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8b52a979-a47d-4c51-bc4b-18d81f2a9f6a tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquiring lock "5dc4bde6-db61-47c2-a2b8-d2a5515b1525-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1515.563677] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8b52a979-a47d-4c51-bc4b-18d81f2a9f6a tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "5dc4bde6-db61-47c2-a2b8-d2a5515b1525-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1515.563994] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8b52a979-a47d-4c51-bc4b-18d81f2a9f6a tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "5dc4bde6-db61-47c2-a2b8-d2a5515b1525-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1515.565878] env[61649]: INFO nova.compute.manager [None req-8b52a979-a47d-4c51-bc4b-18d81f2a9f6a tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Terminating instance [ 1515.567831] env[61649]: DEBUG nova.compute.manager [None req-8b52a979-a47d-4c51-bc4b-18d81f2a9f6a tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Start destroying the instance on the hypervisor.
{{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1515.568157] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-8b52a979-a47d-4c51-bc4b-18d81f2a9f6a tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1515.568520] env[61649]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-95b07def-d177-4561-9bdd-3923c4e8f6c6 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.578327] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b93b2ee-0053-48d9-b311-2c6b457edfd7 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.590404] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6892b77b07e741b9833d938b0172be2d [ 1515.591293] env[61649]: DEBUG nova.compute.manager [None req-a2e37de6-c0c6-4a75-b735-63fb2f75786f tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] [instance: 36d37cda-b987-4c5a-8af1-6eede009e61e] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1515.593456] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-a2e37de6-c0c6-4a75-b735-63fb2f75786f tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Expecting reply to msg 099fa9b1e76a4c2d99655eb487045312 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1515.612206] env[61649]: WARNING nova.virt.vmwareapi.vmops [None req-8b52a979-a47d-4c51-bc4b-18d81f2a9f6a tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5dc4bde6-db61-47c2-a2b8-d2a5515b1525 could not be found. [ 1515.612560] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-8b52a979-a47d-4c51-bc4b-18d81f2a9f6a tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1515.612864] env[61649]: INFO nova.compute.manager [None req-8b52a979-a47d-4c51-bc4b-18d81f2a9f6a tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1515.613211] env[61649]: DEBUG oslo.service.loopingcall [None req-8b52a979-a47d-4c51-bc4b-18d81f2a9f6a tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1515.613546] env[61649]: DEBUG nova.compute.manager [-] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1515.613746] env[61649]: DEBUG nova.network.neutron [-] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1515.617586] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 099fa9b1e76a4c2d99655eb487045312 [ 1515.618196] env[61649]: DEBUG nova.compute.manager [None req-a2e37de6-c0c6-4a75-b735-63fb2f75786f tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] [instance: 36d37cda-b987-4c5a-8af1-6eede009e61e] Instance disappeared before build. {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1515.618633] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-a2e37de6-c0c6-4a75-b735-63fb2f75786f tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Expecting reply to msg 4622089e9a444d7b83797608d687899a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1515.626943] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4622089e9a444d7b83797608d687899a [ 1515.637609] env[61649]: DEBUG oslo_concurrency.lockutils [None req-a2e37de6-c0c6-4a75-b735-63fb2f75786f tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Lock "36d37cda-b987-4c5a-8af1-6eede009e61e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 244.924s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1515.638203] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg b6b1c1fe27bd4b7db57d8b269baec64c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1515.639657] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 3e21f1cbd3824cbabe800ef23253e7ab in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1515.646412] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b6b1c1fe27bd4b7db57d8b269baec64c [ 1515.646868] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3e21f1cbd3824cbabe800ef23253e7ab [ 1515.647297] env[61649]: DEBUG nova.compute.manager [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Starting instance...
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1515.649280] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 139cb1fc35f24e02b03fbb133e54d37c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1515.650018] env[61649]: DEBUG nova.network.neutron [-] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1515.650423] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg d9c97cc547d146c2ba1d642816fb1921 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1515.656496] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d9c97cc547d146c2ba1d642816fb1921 [ 1515.656952] env[61649]: INFO nova.compute.manager [-] [instance: 5dc4bde6-db61-47c2-a2b8-d2a5515b1525] Took 0.04 seconds to deallocate network for instance. [ 1515.660230] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8b52a979-a47d-4c51-bc4b-18d81f2a9f6a tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg e7e70d0b4cc143a3ab0080cbaec942ce in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1515.697327] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e7e70d0b4cc143a3ab0080cbaec942ce [ 1515.700524] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 139cb1fc35f24e02b03fbb133e54d37c [ 1515.712140] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8b52a979-a47d-4c51-bc4b-18d81f2a9f6a tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 38f364900f82402c8c21da6f2a945583 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1515.717446] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1515.717701] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1515.719055] env[61649]: INFO nova.compute.claims [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1515.720688] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 3f8bd87a8fc74d619a79375a82a88757 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1515.747633] env[61649]: INFO oslo_messaging._drivers.amqpdriver 
[-] Received RPC response for msg 38f364900f82402c8c21da6f2a945583 [ 1515.750484] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8b52a979-a47d-4c51-bc4b-18d81f2a9f6a tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "5dc4bde6-db61-47c2-a2b8-d2a5515b1525" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.187s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1515.750819] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8b52a979-a47d-4c51-bc4b-18d81f2a9f6a tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 2c392737df394b5e845db8e51e9d220b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1515.754064] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3f8bd87a8fc74d619a79375a82a88757 [ 1515.755904] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg baf42eff809243b8a2d0b551e1cdc0d2 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1515.760836] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2c392737df394b5e845db8e51e9d220b [ 1515.762933] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg baf42eff809243b8a2d0b551e1cdc0d2 [ 1515.908318] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9fcb373-5124-47dd-b26a-aced12911c01 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.915854] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16b06844-09ab-496f-bdeb-349e2fff0769 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.948716] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28a27cb0-37ad-4ead-891c-8099f9824639 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.955693] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9076533-e8be-4a51-9c62-dadd501f0cfb {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.968361] env[61649]: DEBUG nova.compute.provider_tree [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1515.968835] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg cd43e1f4dac54e508caf0f0349295df9 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1515.976133] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cd43e1f4dac54e508caf0f0349295df9 [ 1515.977057] env[61649]: DEBUG nova.scheduler.client.report [None
req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1515.979217] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg dc7d9d4efe4d40d39598ecb8298e878e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1515.992993] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dc7d9d4efe4d40d39598ecb8298e878e [ 1515.993651] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.276s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1515.994106] env[61649]: DEBUG nova.compute.manager [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Start building networks asynchronously for instance. {{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1515.995683] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 50505fc4b87f475dbc62699c1aa0b285 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1516.023479] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 50505fc4b87f475dbc62699c1aa0b285 [ 1516.024984] env[61649]: DEBUG nova.compute.utils [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Using /dev/sd instead of None {{(pid=61649) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1516.025587] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 30d03c97c3054e5bb34814483497b9dc in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1516.026544] env[61649]: DEBUG nova.compute.manager [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Allocating IP information in the background. 
{{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1516.026716] env[61649]: DEBUG nova.network.neutron [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] allocate_for_instance() {{(pid=61649) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1516.036270] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 30d03c97c3054e5bb34814483497b9dc [ 1516.036798] env[61649]: DEBUG nova.compute.manager [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Start building block device mappings for instance. {{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1516.038448] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 10107ec265ec46b784571140b0e36304 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1516.065360] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 10107ec265ec46b784571140b0e36304 [ 1516.067969] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 14a4bacc82c244b69f0ef31adfab31b5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1516.074084] env[61649]: DEBUG nova.policy [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4eecfef918474dc8ad298d9eb189f56f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3939f446f6f04aa08a0b91101e55572b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61649) authorize /opt/stack/nova/nova/policy.py:203}} [ 1516.098775] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 14a4bacc82c244b69f0ef31adfab31b5 [ 1516.099914] env[61649]: DEBUG nova.compute.manager [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Start spawning the instance on the hypervisor. 
{{(pid=61649) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1516.121109] env[61649]: DEBUG nova.virt.hardware [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-04-02T10:03:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-04-02T10:03:42Z,direct_url=,disk_format='vmdk',id=d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d9c1bd4c77004c3cb8e42232cad1896c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-04-02T10:03:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1516.121361] env[61649]: DEBUG nova.virt.hardware [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Flavor limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1516.121527] env[61649]: DEBUG nova.virt.hardware [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Image limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1516.121762] env[61649]: DEBUG nova.virt.hardware [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Flavor pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1516.122124] env[61649]: DEBUG nova.virt.hardware [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Image pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1516.122287] env[61649]: DEBUG nova.virt.hardware [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1516.123065] env[61649]: DEBUG nova.virt.hardware [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1516.123299] env[61649]: DEBUG nova.virt.hardware [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1516.123537] env[61649]: DEBUG nova.virt.hardware [None 
req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Got 1 possible topologies {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1516.123654] env[61649]: DEBUG nova.virt.hardware [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1516.123836] env[61649]: DEBUG nova.virt.hardware [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1516.124700] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f02e482-5d46-4b86-ad13-13d249919415 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.134156] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0e45750-0084-43ca-8d22-6d9b40a1f2ee {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.366108] env[61649]: DEBUG nova.network.neutron [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Successfully created port: 062a910f-cde4-4544-be94-5ebd1f54e6ca {{(pid=61649) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1516.935648] env[61649]: DEBUG nova.network.neutron [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Successfully updated port: 062a910f-cde4-4544-be94-5ebd1f54e6ca {{(pid=61649) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1516.936157] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 7f2eec35f5274eb8811a92e1140bfeb1 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1516.945781] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7f2eec35f5274eb8811a92e1140bfeb1 [ 1516.946482] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquiring lock "refresh_cache-5f424618-f9b3-4e9a-898c-2d1a07476cc7" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1516.946603] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquired lock "refresh_cache-5f424618-f9b3-4e9a-898c-2d1a07476cc7" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1516.946744] env[61649]: DEBUG nova.network.neutron [None 
req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Building network info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1516.947395] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg fb7ff4ff8db84a9fb4870c863b937bd4 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1516.954408] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fb7ff4ff8db84a9fb4870c863b937bd4 [ 1516.983564] env[61649]: DEBUG nova.network.neutron [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Instance cache missing network info. {{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1517.122778] env[61649]: DEBUG nova.network.neutron [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Updating instance_info_cache with network_info: [{"id": "062a910f-cde4-4544-be94-5ebd1f54e6ca", "address": "fa:16:3e:3b:f4:52", "network": {"id": "e70549f5-8c32-456f-9488-fdfff63b2dc8", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2020544300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3939f446f6f04aa08a0b91101e55572b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b29df12-5674-476d-a9e5-5e20f704d224", "external-id": "nsx-vlan-transportzone-754", "segmentation_id": 754, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap062a910f-cd", "ovs_interfaceid": "062a910f-cde4-4544-be94-5ebd1f54e6ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1517.123278] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 8e14be9fa6644960aa03bfdc0d33a4f6 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1517.133030] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8e14be9fa6644960aa03bfdc0d33a4f6 [ 1517.133575] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Releasing lock "refresh_cache-5f424618-f9b3-4e9a-898c-2d1a07476cc7" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1517.133839] env[61649]: 
DEBUG nova.compute.manager [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Instance network_info: |[{"id": "062a910f-cde4-4544-be94-5ebd1f54e6ca", "address": "fa:16:3e:3b:f4:52", "network": {"id": "e70549f5-8c32-456f-9488-fdfff63b2dc8", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2020544300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3939f446f6f04aa08a0b91101e55572b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b29df12-5674-476d-a9e5-5e20f704d224", "external-id": "nsx-vlan-transportzone-754", "segmentation_id": 754, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap062a910f-cd", "ovs_interfaceid": "062a910f-cde4-4544-be94-5ebd1f54e6ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1517.134209] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3b:f4:52', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8b29df12-5674-476d-a9e5-5e20f704d224', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '062a910f-cde4-4544-be94-5ebd1f54e6ca', 'vif_model': 'vmxnet3'}] {{(pid=61649) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1517.141794] env[61649]: DEBUG oslo.service.loopingcall [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1517.142230] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Creating VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1517.142460] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e669656b-f477-47ca-9ace-dc69654962c2 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.163977] env[61649]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1517.163977] env[61649]: value = "task-158251" [ 1517.163977] env[61649]: _type = "Task" [ 1517.163977] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.175438] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158251, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.483697] env[61649]: DEBUG nova.compute.manager [req-db4d2a0c-b655-4772-8e74-87d924441e34 req-82264cda-20dc-4984-b92f-f8f476dce5fd service nova] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Received event network-vif-plugged-062a910f-cde4-4544-be94-5ebd1f54e6ca {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1517.483908] env[61649]: DEBUG oslo_concurrency.lockutils [req-db4d2a0c-b655-4772-8e74-87d924441e34 req-82264cda-20dc-4984-b92f-f8f476dce5fd service nova] Acquiring lock "5f424618-f9b3-4e9a-898c-2d1a07476cc7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1517.484136] env[61649]: DEBUG oslo_concurrency.lockutils [req-db4d2a0c-b655-4772-8e74-87d924441e34 req-82264cda-20dc-4984-b92f-f8f476dce5fd service nova] Lock "5f424618-f9b3-4e9a-898c-2d1a07476cc7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1517.484267] env[61649]: DEBUG oslo_concurrency.lockutils [req-db4d2a0c-b655-4772-8e74-87d924441e34 req-82264cda-20dc-4984-b92f-f8f476dce5fd service nova] Lock "5f424618-f9b3-4e9a-898c-2d1a07476cc7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1517.484468] env[61649]: DEBUG nova.compute.manager [req-db4d2a0c-b655-4772-8e74-87d924441e34 req-82264cda-20dc-4984-b92f-f8f476dce5fd service nova] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] No waiting events found dispatching network-vif-plugged-062a910f-cde4-4544-be94-5ebd1f54e6ca {{(pid=61649) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1517.484586] env[61649]: WARNING nova.compute.manager [req-db4d2a0c-b655-4772-8e74-87d924441e34 req-82264cda-20dc-4984-b92f-f8f476dce5fd service nova] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Received unexpected event network-vif-plugged-062a910f-cde4-4544-be94-5ebd1f54e6ca for instance with vm_state building and task_state spawning. [ 1517.484746] env[61649]: DEBUG nova.compute.manager [req-db4d2a0c-b655-4772-8e74-87d924441e34 req-82264cda-20dc-4984-b92f-f8f476dce5fd service nova] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Received event network-changed-062a910f-cde4-4544-be94-5ebd1f54e6ca {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1517.484899] env[61649]: DEBUG nova.compute.manager [req-db4d2a0c-b655-4772-8e74-87d924441e34 req-82264cda-20dc-4984-b92f-f8f476dce5fd service nova] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Refreshing instance network info cache due to event network-changed-062a910f-cde4-4544-be94-5ebd1f54e6ca.
{{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 1517.485080] env[61649]: DEBUG oslo_concurrency.lockutils [req-db4d2a0c-b655-4772-8e74-87d924441e34 req-82264cda-20dc-4984-b92f-f8f476dce5fd service nova] Acquiring lock "refresh_cache-5f424618-f9b3-4e9a-898c-2d1a07476cc7" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1517.485212] env[61649]: DEBUG oslo_concurrency.lockutils [req-db4d2a0c-b655-4772-8e74-87d924441e34 req-82264cda-20dc-4984-b92f-f8f476dce5fd service nova] Acquired lock "refresh_cache-5f424618-f9b3-4e9a-898c-2d1a07476cc7" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1517.485365] env[61649]: DEBUG nova.network.neutron [req-db4d2a0c-b655-4772-8e74-87d924441e34 req-82264cda-20dc-4984-b92f-f8f476dce5fd service nova] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Refreshing network info cache for port 062a910f-cde4-4544-be94-5ebd1f54e6ca {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 1517.485837] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-db4d2a0c-b655-4772-8e74-87d924441e34 req-82264cda-20dc-4984-b92f-f8f476dce5fd service nova] Expecting reply to msg 1cc9523b1513480492bdfb3ae21adc98 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1517.494818] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1cc9523b1513480492bdfb3ae21adc98 [ 1517.674002] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158251, 'name': CreateVM_Task, 'duration_secs': 0.294532} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1517.674186] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Created VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1517.674846] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1517.675010] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1517.675317] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1517.675566] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-667f82b6-9060-4428-8d9f-73e264151950 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.680101] env[61649]: DEBUG oslo_vmware.api [None 
req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Waiting for the task: (returnval){ [ 1517.680101] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52867362-e3dc-92ac-fb37-828c2b1e2091" [ 1517.680101] env[61649]: _type = "Task" [ 1517.680101] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.687354] env[61649]: DEBUG oslo_vmware.api [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52867362-e3dc-92ac-fb37-828c2b1e2091, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.877067] env[61649]: DEBUG nova.network.neutron [req-db4d2a0c-b655-4772-8e74-87d924441e34 req-82264cda-20dc-4984-b92f-f8f476dce5fd service nova] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Updated VIF entry in instance network info cache for port 062a910f-cde4-4544-be94-5ebd1f54e6ca. {{(pid=61649) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 1517.877448] env[61649]: DEBUG nova.network.neutron [req-db4d2a0c-b655-4772-8e74-87d924441e34 req-82264cda-20dc-4984-b92f-f8f476dce5fd service nova] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Updating instance_info_cache with network_info: [{"id": "062a910f-cde4-4544-be94-5ebd1f54e6ca", "address": "fa:16:3e:3b:f4:52", "network": {"id": "e70549f5-8c32-456f-9488-fdfff63b2dc8", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2020544300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3939f446f6f04aa08a0b91101e55572b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b29df12-5674-476d-a9e5-5e20f704d224", "external-id": "nsx-vlan-transportzone-754", "segmentation_id": 754, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap062a910f-cd", "ovs_interfaceid": "062a910f-cde4-4544-be94-5ebd1f54e6ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1517.877988] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-db4d2a0c-b655-4772-8e74-87d924441e34 req-82264cda-20dc-4984-b92f-f8f476dce5fd service nova] Expecting reply to msg ff6b5f255bdf42e4b3e93760b4622417 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1517.888852] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ff6b5f255bdf42e4b3e93760b4622417 [ 1517.888852] env[61649]: DEBUG oslo_concurrency.lockutils [req-db4d2a0c-b655-4772-8e74-87d924441e34 req-82264cda-20dc-4984-b92f-f8f476dce5fd service nova] Releasing lock "refresh_cache-5f424618-f9b3-4e9a-898c-2d1a07476cc7" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 
1518.190575] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1518.190905] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Processing image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1518.191062] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1551.930441] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1553.930465] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1553.930914] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61649) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 1555.924119] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1555.928714] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1556.929983] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1557.929829] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1557.930032] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Starting heal instance info cache {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 1557.930267] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Rebuilding the list of instances to heal {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 1557.930775] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg c273389e62b5450f9f703b5b8dc2391e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1557.948629] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c273389e62b5450f9f703b5b8dc2391e [ 1557.951315] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1557.951461] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1557.951593] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1557.951717] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Skipping network cache update for instance because it is Building. 
{{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1557.951838] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1557.951955] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1557.952086] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: b6243867-9546-4663-9d48-5c040537490b] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1557.952205] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1557.952321] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1557.952435] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1557.952585] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Didn't find any instances for network info cache update. 
{{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 1558.680323] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c0cf039f-9dd4-4997-b64d-62cd285e50fd tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 624d28e3fd584f9cb87677c814c2a4a4 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1558.689913] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 624d28e3fd584f9cb87677c814c2a4a4 [ 1558.690398] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c0cf039f-9dd4-4997-b64d-62cd285e50fd tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquiring lock "5f424618-f9b3-4e9a-898c-2d1a07476cc7" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1559.929133] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1560.154427] env[61649]: WARNING oslo_vmware.rw_handles [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1560.154427] env[61649]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1560.154427] env[61649]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1560.154427] env[61649]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1560.154427] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1560.154427] env[61649]: ERROR oslo_vmware.rw_handles response.begin() [ 1560.154427] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1560.154427] env[61649]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1560.154427] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1560.154427] env[61649]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1560.154427] env[61649]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1560.154427] env[61649]: ERROR oslo_vmware.rw_handles [ 1560.154819] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Downloaded image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to vmware_temp/e5fb274a-b4b6-4733-9432-aca43ba5fbc7/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1560.156851] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150
tempest-ServerAddressesNegativeTestJSON-624030150-project-member] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Caching image {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1560.157095] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Copying Virtual Disk [datastore1] vmware_temp/e5fb274a-b4b6-4733-9432-aca43ba5fbc7/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk to [datastore1] vmware_temp/e5fb274a-b4b6-4733-9432-aca43ba5fbc7/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk {{(pid=61649) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1560.157384] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e8104f49-ed3a-4fe9-8d24-a0b22581c5b3 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.164491] env[61649]: DEBUG oslo_vmware.api [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Waiting for the task: (returnval){ [ 1560.164491] env[61649]: value = "task-158252" [ 1560.164491] env[61649]: _type = "Task" [ 1560.164491] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1560.171964] env[61649]: DEBUG oslo_vmware.api [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Task: {'id': task-158252, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.676349] env[61649]: DEBUG oslo_vmware.exceptions [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Fault InvalidArgument not matched. 
{{(pid=61649) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1560.676653] env[61649]: DEBUG oslo_concurrency.lockutils [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1560.677188] env[61649]: ERROR nova.compute.manager [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1560.677188] env[61649]: Faults: ['InvalidArgument'] [ 1560.677188] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Traceback (most recent call last): [ 1560.677188] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1560.677188] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] yield resources [ 1560.677188] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1560.677188] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] self.driver.spawn(context, instance, image_meta, [ 1560.677188] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1560.677188] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1560.677188] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1560.677188] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] self._fetch_image_if_missing(context, vi) [ 1560.677188] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1560.677562] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] image_cache(vi, tmp_image_ds_loc) [ 1560.677562] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1560.677562] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] vm_util.copy_virtual_disk( [ 1560.677562] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1560.677562] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] session._wait_for_task(vmdk_copy_task) [ 1560.677562] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1560.677562] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] return self.wait_for_task(task_ref) [ 1560.677562] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1560.677562] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] return evt.wait() [ 1560.677562] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1560.677562] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] result = hub.switch() [ 1560.677562] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1560.677562] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] return self.greenlet.switch() [ 1560.677908] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1560.677908] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] self.f(*self.args, **self.kw) [ 1560.677908] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1560.677908] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] raise exceptions.translate_fault(task_info.error) [ 1560.677908] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1560.677908] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Faults: ['InvalidArgument'] [ 1560.677908] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] [ 1560.677908] env[61649]: INFO nova.compute.manager [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Terminating instance [ 1560.678994] env[61649]: DEBUG oslo_concurrency.lockutils [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1560.679195] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1560.679451] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-46e1867c-1d49-44ac-b65c-0e315bfad7b1 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.681656] env[61649]: DEBUG nova.compute.manager [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1560.681867] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1560.682610] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abf12aec-e08e-448e-aaaf-0d199a74a5b2 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.689906] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Unregistering the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1560.690900] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-95c14b2d-24c3-4427-aaf9-5625c6f81929 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.692239] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1560.692409] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61649) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1560.693051] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-36792256-50ac-4e6f-ad09-468e371f46c5 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.698119] env[61649]: DEBUG oslo_vmware.api [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Waiting for the task: (returnval){ [ 1560.698119] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]529c0e20-415a-a2a3-4def-e5da8c779851" [ 1560.698119] env[61649]: _type = "Task" [ 1560.698119] env[61649]: } to complete. 
{{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1560.704852] env[61649]: DEBUG oslo_vmware.api [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]529c0e20-415a-a2a3-4def-e5da8c779851, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.755721] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Unregistered the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1560.756076] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Deleting contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1560.756359] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Deleting the datastore file [datastore1] 5730229a-fd0c-4df1-9059-cd6ed39e954c {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1560.756671] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fb54c997-b91c-4c7b-95c4-247ec929a385 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.762872] env[61649]: DEBUG oslo_vmware.api [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Waiting for the task: (returnval){ [ 1560.762872] env[61649]: value = "task-158254" [ 1560.762872] env[61649]: _type = "Task" [ 1560.762872] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1560.770184] env[61649]: DEBUG oslo_vmware.api [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Task: {'id': task-158254, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.929111] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1560.929636] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1560.929948] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg a3f68d3579674e18a927f8a317c94fab in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1560.939011] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a3f68d3579674e18a927f8a317c94fab [ 1560.940135] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1560.940409] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1560.940678] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1560.940910] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61649) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1560.942017] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e7584f4-939b-4f6e-8b33-0ad4db9c1ee8 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.950494] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81f5277f-4e49-4553-ae5b-582fca46ef6f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.964143] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-311e30fa-3750-4fa6-b2a8-6f93ea1e0774 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.970518] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-090f6d43-21e7-42b6-9d9c-a9cc5158f177 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.998968] env[61649]: DEBUG 
nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181749MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61649) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1560.999194] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1560.999456] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1561.000327] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg f78e343c9155468eb0acf8d7bafeb6ed in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1561.032829] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f78e343c9155468eb0acf8d7bafeb6ed [ 1561.036772] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg bd572580197e435db233f70169be7ce9 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1561.045290] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bd572580197e435db233f70169be7ce9 [ 1561.062086] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 5730229a-fd0c-4df1-9059-cd6ed39e954c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1561.062233] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 0b0050ff-2714-4068-9956-089c6aa3eff1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1561.062356] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 0fb0aaae-b6d2-418d-81a9-74671f4b97c6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1561.062475] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 5f67180f-6b27-4487-8858-5f57fcffd041 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1561.062591] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance aa39503a-2342-421e-928f-35ec7c8e47fb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1561.062703] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance d6e8f17f-40c4-46e0-a900-d92d1da01ed8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1561.062815] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance b6243867-9546-4663-9d48-5c040537490b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1561.062925] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 4b87e74a-2408-466f-b1c2-68330c31fb9d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1561.063034] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance ff225293-ad72-499a-9b5b-147d0bc40350 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1561.063141] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 5f424618-f9b3-4e9a-898c-2d1a07476cc7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1561.063642] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg b43c99708fa2460ba442450ebe05ea4c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1561.072865] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b43c99708fa2460ba442450ebe05ea4c [ 1561.073548] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 4661732c-51dc-4a77-aa32-28049dbd5ad7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
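
The ten actively managed instances listed above each hold a {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1} allocation, and these are what the resource tracker sums into the "Final resource view" reported a few entries below. A minimal sketch of that arithmetic, using only values quoted in this log; the variable names are invented and this is not Nova's implementation:

```python
# Reproduce the "Final resource view" figures from the allocations
# listed above plus the 512MB MEMORY_MB 'reserved' in the provider
# inventory quoted later in this log. Names here are illustrative.
allocations = [{"DISK_GB": 1, "MEMORY_MB": 128, "VCPU": 1}] * 10
reserved_ram_mb = 512

used_ram_mb = reserved_ram_mb + sum(a["MEMORY_MB"] for a in allocations)
used_vcpus = sum(a["VCPU"] for a in allocations)
used_disk_gb = sum(a["DISK_GB"] for a in allocations)

# Matches "used_ram=1792MB ... used_disk=10GB ... used_vcpus=10" below.
assert (used_ram_mb, used_vcpus, used_disk_gb) == (1792, 10, 10)
```
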
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1561.074009] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 9d790d4c7dbb441d80ec366f3b23a91a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1561.082626] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9d790d4c7dbb441d80ec366f3b23a91a [ 1561.083245] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 545947a4-3f1a-44fe-ac02-ec5e2e5844d5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1561.083683] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 94472a8855ce4e4cbd96357fca553e9f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1561.091958] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 94472a8855ce4e4cbd96357fca553e9f [ 1561.092702] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1561.092923] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1561.093069] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1561.210353] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Preparing fetch location {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1561.210512] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Creating directory with path [datastore1] vmware_temp/a781640a-e0f0-4c64-9c93-6697eafb0881/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1561.210926] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-37daee66-eb47-489d-8917-c1bc429df846 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1561.222134] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Created directory with path [datastore1] vmware_temp/a781640a-e0f0-4c64-9c93-6697eafb0881/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1561.222336] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Fetch image to [datastore1] vmware_temp/a781640a-e0f0-4c64-9c93-6697eafb0881/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1561.222506] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to [datastore1] vmware_temp/a781640a-e0f0-4c64-9c93-6697eafb0881/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1561.223255] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6a013f4-6bad-44c5-8df7-0a8369fbde96 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.230661] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bd91754-68f8-460c-9ecd-5250b0648999 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.242470] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c86bfa50-e353-45fc-a7c6-4823ccab3979 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.277335] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a48b27d-5158-4449-9d10-cbb2b494710c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.280185] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd2482c0-498c-4c8d-a648-d3f667efd269 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.287346] env[61649]: DEBUG oslo_vmware.api [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Task: {'id': task-158254, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078757} completed successfully. 
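
The DeleteDatastoreFile_Task entry above follows oslo.vmware's run-then-poll pattern (the _poll_task frames in the source paths shown). A minimal sketch of that pattern under stated assumptions: it needs a reachable vCenter, and the host, credentials, and datastore path below are placeholders, not values from this log:

```python
# Sketch of the run-then-poll pattern behind the task entries above.
# All connection values are placeholders, not taken from this log.
from oslo_vmware import api

session = api.VMwareAPISession(
    host="vc.example.invalid", server_username="user",
    server_password="secret", api_retry_count=2, task_poll_interval=0.5)

# DeleteDatastoreFile_Task returns a task reference immediately;
# wait_for_task polls it and raises VimFaultException on a fault,
# the same exception type seen in the traceback later in this log.
# Real callers also pass a datacenter ref, as nova's ds_util does.
file_manager = session.vim.service_content.fileManager
task = session.invoke_api(
    session.vim, "DeleteDatastoreFile_Task", file_manager,
    name="[datastore1] vmware_temp/example.vmdk")
session.wait_for_task(task)
```
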
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1561.290415] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Deleted the datastore file {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1561.290689] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Deleted contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1561.290936] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1561.291246] env[61649]: INFO nova.compute.manager [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Took 0.61 seconds to destroy the instance on the hypervisor. [ 1561.293362] env[61649]: DEBUG nova.compute.claims [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Aborting claim: {{(pid=61649) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1561.293597] env[61649]: DEBUG oslo_concurrency.lockutils [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1561.293858] env[61649]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-eccca2ac-1baa-429f-993a-1bb28db3943a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.296121] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-466e43c0-1364-4a1a-bbf8-933b662dc06b {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.329604] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7286565-6b35-49ed-8e27-1a9a21267821 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.332365] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to the data store datastore1 {{(pid=61649) fetch_image 
/opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1561.338466] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-268e477c-1c06-460f-bcea-82a70bf3280d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.351958] env[61649]: DEBUG nova.compute.provider_tree [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1561.352473] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg f75b8c2d92d542dfa0dee1195c169119 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1561.363315] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f75b8c2d92d542dfa0dee1195c169119 [ 1561.363315] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1561.365457] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 680773dc2e7a4b708aa39dc62aec8034 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1561.376032] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 680773dc2e7a4b708aa39dc62aec8034 [ 1561.376672] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61649) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1561.376897] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.377s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1561.377175] env[61649]: DEBUG oslo_concurrency.lockutils [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.084s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1561.379586] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Expecting reply to msg 38ed266399a24b12a663e8220ceab7fd in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1561.386095] env[61649]: DEBUG oslo_vmware.rw_handles [None 
req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a781640a-e0f0-4c64-9c93-6697eafb0881/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1561.442491] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 38ed266399a24b12a663e8220ceab7fd [ 1561.448659] env[61649]: DEBUG oslo_vmware.rw_handles [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Completed reading data from the image iterator. {{(pid=61649) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1561.448659] env[61649]: DEBUG oslo_vmware.rw_handles [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a781640a-e0f0-4c64-9c93-6697eafb0881/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1561.601518] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4c31cff-c118-494d-a180-78a41cf3dba3 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.609049] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7576a3c-46c3-482b-8c7b-c37eeb0d9b1c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.638870] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a118a52f-80c9-4f1d-9ff1-bd267f0e995d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.645326] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-221a4580-8a07-451f-afca-db2a39d0d22d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.657603] env[61649]: DEBUG nova.compute.provider_tree [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1561.658089] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Expecting reply to msg e0b9e3b4dd6442138538ae94be1ebad7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1561.665659] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e0b9e3b4dd6442138538ae94be1ebad7 [ 1561.666532] env[61649]: DEBUG 
nova.scheduler.client.report [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1561.668717] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Expecting reply to msg 0210543d449246f485d61483343fe923 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1561.684284] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0210543d449246f485d61483343fe923 [ 1561.685091] env[61649]: DEBUG oslo_concurrency.lockutils [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.308s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1561.685719] env[61649]: ERROR nova.compute.manager [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1561.685719] env[61649]: Faults: ['InvalidArgument'] [ 1561.685719] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Traceback (most recent call last): [ 1561.685719] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1561.685719] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] self.driver.spawn(context, instance, image_meta, [ 1561.685719] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1561.685719] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1561.685719] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1561.685719] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] self._fetch_image_if_missing(context, vi) [ 1561.685719] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1561.685719] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] 
image_cache(vi, tmp_image_ds_loc) [ 1561.685719] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1561.686048] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] vm_util.copy_virtual_disk( [ 1561.686048] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1561.686048] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] session._wait_for_task(vmdk_copy_task) [ 1561.686048] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1561.686048] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] return self.wait_for_task(task_ref) [ 1561.686048] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1561.686048] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] return evt.wait() [ 1561.686048] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1561.686048] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] result = hub.switch() [ 1561.686048] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1561.686048] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] return self.greenlet.switch() [ 1561.686048] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1561.686048] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] self.f(*self.args, **self.kw) [ 1561.686378] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1561.686378] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] raise exceptions.translate_fault(task_info.error) [ 1561.686378] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1561.686378] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Faults: ['InvalidArgument'] [ 1561.686378] env[61649]: ERROR nova.compute.manager [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] [ 1561.686511] env[61649]: DEBUG nova.compute.utils [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] VimFaultException {{(pid=61649) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1561.687930] env[61649]: DEBUG nova.compute.manager [None 
req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Build of instance 5730229a-fd0c-4df1-9059-cd6ed39e954c was re-scheduled: A specified parameter was not correct: fileType [ 1561.687930] env[61649]: Faults: ['InvalidArgument'] {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1561.688332] env[61649]: DEBUG nova.compute.manager [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Unplugging VIFs for instance {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1561.688509] env[61649]: DEBUG nova.compute.manager [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1561.688679] env[61649]: DEBUG nova.compute.manager [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1561.688841] env[61649]: DEBUG nova.network.neutron [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1561.942659] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Expecting reply to msg a58f10a2a2724474a86b3d80d6e205b5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1561.950510] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a58f10a2a2724474a86b3d80d6e205b5 [ 1561.951084] env[61649]: DEBUG nova.network.neutron [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1561.951628] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Expecting reply to msg a107c125d2f5404d84bba44aca439a51 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1561.960608] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a107c125d2f5404d84bba44aca439a51 [ 1561.961240] env[61649]: INFO nova.compute.manager [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 
tempest-ServerAddressesNegativeTestJSON-624030150-project-member] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Took 0.27 seconds to deallocate network for instance. [ 1561.963561] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Expecting reply to msg cd16a8995403452a87d4251c149305a6 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1561.994686] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cd16a8995403452a87d4251c149305a6 [ 1561.997861] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Expecting reply to msg 37e870dd97684f28a473e895b00adaff in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1562.025986] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 37e870dd97684f28a473e895b00adaff [ 1562.044328] env[61649]: INFO nova.scheduler.client.report [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Deleted allocations for instance 5730229a-fd0c-4df1-9059-cd6ed39e954c [ 1562.050166] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Expecting reply to msg a83449e2b7514e9191da3275b66ee41a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1562.059988] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a83449e2b7514e9191da3275b66ee41a [ 1562.060512] env[61649]: DEBUG oslo_concurrency.lockutils [None req-6b3efcab-6a6f-4e78-8c91-d52cccd105d3 tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Lock "5730229a-fd0c-4df1-9059-cd6ed39e954c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 581.087s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1562.061043] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg c3de62e553554f62803c333a760b38c7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1562.061752] env[61649]: DEBUG oslo_concurrency.lockutils [None req-eb3d288b-4e53-4f52-ae93-d8365579e6cd tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Lock "5730229a-fd0c-4df1-9059-cd6ed39e954c" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 384.343s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1562.061967] env[61649]: DEBUG oslo_concurrency.lockutils [None req-eb3d288b-4e53-4f52-ae93-d8365579e6cd tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Acquiring lock "5730229a-fd0c-4df1-9059-cd6ed39e954c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61649) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1562.062172] env[61649]: DEBUG oslo_concurrency.lockutils [None req-eb3d288b-4e53-4f52-ae93-d8365579e6cd tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Lock "5730229a-fd0c-4df1-9059-cd6ed39e954c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1562.062580] env[61649]: DEBUG oslo_concurrency.lockutils [None req-eb3d288b-4e53-4f52-ae93-d8365579e6cd tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Lock "5730229a-fd0c-4df1-9059-cd6ed39e954c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1562.064595] env[61649]: INFO nova.compute.manager [None req-eb3d288b-4e53-4f52-ae93-d8365579e6cd tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Terminating instance [ 1562.066188] env[61649]: DEBUG nova.compute.manager [None req-eb3d288b-4e53-4f52-ae93-d8365579e6cd tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1562.066377] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-eb3d288b-4e53-4f52-ae93-d8365579e6cd tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1562.066874] env[61649]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0618ec37-f6cc-4433-952b-6f1dec61c862 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.076671] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-600a8816-814b-4e96-9df2-8a723d74f0ac {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.087877] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c3de62e553554f62803c333a760b38c7 [ 1562.088422] env[61649]: DEBUG nova.compute.manager [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Starting instance... 
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1562.090964] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg ff18fd9a8b764466be1b6d3b6cf3cb84 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1562.109779] env[61649]: WARNING nova.virt.vmwareapi.vmops [None req-eb3d288b-4e53-4f52-ae93-d8365579e6cd tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5730229a-fd0c-4df1-9059-cd6ed39e954c could not be found. [ 1562.109973] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-eb3d288b-4e53-4f52-ae93-d8365579e6cd tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1562.110148] env[61649]: INFO nova.compute.manager [None req-eb3d288b-4e53-4f52-ae93-d8365579e6cd tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1562.110388] env[61649]: DEBUG oslo.service.loopingcall [None req-eb3d288b-4e53-4f52-ae93-d8365579e6cd tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1562.110651] env[61649]: DEBUG nova.compute.manager [-] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1562.110744] env[61649]: DEBUG nova.network.neutron [-] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1562.124584] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ff18fd9a8b764466be1b6d3b6cf3cb84 [ 1562.130403] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 8246583036064494a2ab0b51cda020d3 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1562.136041] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8246583036064494a2ab0b51cda020d3 [ 1562.136416] env[61649]: DEBUG nova.network.neutron [-] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1562.136910] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg a1817ac13379411d8b256a9cf09a2626 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1562.139806] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1562.139900] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1562.141301] env[61649]: INFO nova.compute.claims [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1562.142794] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 3237ba06f2a049bfa62d876bc6e09fe5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1562.144118] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a1817ac13379411d8b256a9cf09a2626 [ 1562.144643] env[61649]: INFO nova.compute.manager [-] [instance: 5730229a-fd0c-4df1-9059-cd6ed39e954c] Took 0.03 seconds to deallocate network for instance. 
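
Throughout this log, the "Acquiring lock / acquired / released ... waited/held" triplets come from oslo.concurrency's lockutils wrappers, and the quoted qualnames (instance_claim, _update_available_resource, do_terminate_instance) are the decorated functions. A minimal sketch of the pattern, assuming nothing beyond oslo.concurrency; the class and method bodies are placeholders, not Nova source:

```python
# Minimal sketch (not Nova source) of the locking pattern behind the
# "Acquiring lock / acquired / released" triplets in this log. The
# resource tracker methods serialize on one shared semaphore name,
# "compute_resources", the lock name quoted in the entries above.
from oslo_concurrency import lockutils

COMPUTE_RESOURCE_SEMAPHORE = "compute_resources"

class ResourceTrackerSketch:
    @lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE)
    def instance_claim(self, context, instance, nodename):
        # Placeholder body. While one decorated method runs, all others
        # block on the same semaphore; lockutils' 'inner' wrapper logs
        # the waited/held durations seen in this log.
        pass

    @lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE)
    def _update_available_resource(self, context):
        pass  # placeholder
```

Because every tracker method serializes on the same name, the held times recorded in this section (0.377s for _update_available_resource, 0.276s for instance_claim) directly gate how quickly concurrent builds can proceed.
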
[ 1562.147722] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-eb3d288b-4e53-4f52-ae93-d8365579e6cd tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Expecting reply to msg 650b8b4b5b7d487c94c034b83c42f65a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1562.173204] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 650b8b4b5b7d487c94c034b83c42f65a [ 1562.176317] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3237ba06f2a049bfa62d876bc6e09fe5 [ 1562.177747] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 891447160f3646fea3af35fa109a0200 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1562.186365] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 891447160f3646fea3af35fa109a0200 [ 1562.189588] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-eb3d288b-4e53-4f52-ae93-d8365579e6cd tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Expecting reply to msg 0520502d078e4136a0190c6f19360e44 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1562.229333] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0520502d078e4136a0190c6f19360e44 [ 1562.234367] env[61649]: DEBUG oslo_concurrency.lockutils [None req-eb3d288b-4e53-4f52-ae93-d8365579e6cd tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Lock "5730229a-fd0c-4df1-9059-cd6ed39e954c" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.173s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1562.234663] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-eb3d288b-4e53-4f52-ae93-d8365579e6cd tempest-ServerAddressesNegativeTestJSON-624030150 tempest-ServerAddressesNegativeTestJSON-624030150-project-member] Expecting reply to msg 6906f536de254daa8744f2b676d76f51 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1562.246543] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6906f536de254daa8744f2b676d76f51 [ 1562.336021] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05e3291d-e671-4ace-baa6-cff6dbd0c299 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.343800] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aede1be5-ed90-493a-8c3e-30a80fe3b0cd {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.372211] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56ececba-f314-4f1e-8681-04e9ecad9b46 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.378891] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3976240-4044-4dab-bb2f-ce8482d82d25 {{(pid=61649) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.391493] env[61649]: DEBUG nova.compute.provider_tree [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1562.391970] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 90f7280b8a684ff2937f5fe87b5e2a7b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1562.400085] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 90f7280b8a684ff2937f5fe87b5e2a7b [ 1562.400879] env[61649]: DEBUG nova.scheduler.client.report [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1562.403270] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 96b797b78dc54061a76aab3236ee0e4f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1562.415450] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 96b797b78dc54061a76aab3236ee0e4f [ 1562.416159] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.276s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1562.416618] env[61649]: DEBUG nova.compute.manager [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Start building networks asynchronously for instance. 
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1562.418197] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg a9f4296f6d474dae98c8af24485a6eae in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1562.445838] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a9f4296f6d474dae98c8af24485a6eae [ 1562.447192] env[61649]: DEBUG nova.compute.utils [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Using /dev/sd instead of None {{(pid=61649) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1562.447758] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 687470ac22394cefa54ba6fa4e19b54a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1562.448535] env[61649]: DEBUG nova.compute.manager [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Allocating IP information in the background. {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1562.448700] env[61649]: DEBUG nova.network.neutron [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] allocate_for_instance() {{(pid=61649) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1562.459076] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 687470ac22394cefa54ba6fa4e19b54a [ 1562.459600] env[61649]: DEBUG nova.compute.manager [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Start building block device mappings for instance. 
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1562.461209] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg e5136eb9536f4f42ab469c63e9c21244 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1562.488868] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e5136eb9536f4f42ab469c63e9c21244 [ 1562.491574] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 2a9947da3d364054b7e0f386b9e0907b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1562.493983] env[61649]: DEBUG nova.policy [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b8c6e9a3ba6a48669b1772886e22e023', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4a732894bf424b5e9e3e972af47a7314', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61649) authorize /opt/stack/nova/nova/policy.py:203}} [ 1562.526417] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2a9947da3d364054b7e0f386b9e0907b [ 1562.527747] env[61649]: DEBUG nova.compute.manager [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Start spawning the instance on the hypervisor. 
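
The nova.virt.hardware entries that follow settle on a 1:1:1 CPU topology. A simplified stand-in for that search (not nova.virt.hardware's actual code): m1.nano sets no hw:cpu_* extra specs, so the limits default to 65536 each, and the only factorization of 1 vCPU is sockets=1, cores=1, threads=1, hence "Got 1 possible topologies" and the single sorted candidate below:

```python
# Simplified illustration of the topology enumeration logged below;
# the real nova.virt.hardware logic also weighs preferences and
# image/flavor constraints. Limits default high when no hw:cpu_*
# extra specs are set, as with the m1.nano flavor in this log.
import itertools

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    """Yield (sockets, cores, threads) triples whose product is vcpus."""
    for s, c, t in itertools.product(
            range(1, min(vcpus, max_sockets) + 1),
            range(1, min(vcpus, max_cores) + 1),
            range(1, min(vcpus, max_threads) + 1)):
        if s * c * t == vcpus:
            yield (s, c, t)

print(list(possible_topologies(1)))  # [(1, 1, 1)]
```
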
{{(pid=61649) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1562.548363] env[61649]: DEBUG nova.virt.hardware [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-04-02T10:03:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-04-02T10:03:42Z,direct_url=,disk_format='vmdk',id=d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d9c1bd4c77004c3cb8e42232cad1896c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-04-02T10:03:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1562.548602] env[61649]: DEBUG nova.virt.hardware [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Flavor limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1562.548759] env[61649]: DEBUG nova.virt.hardware [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Image limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1562.548936] env[61649]: DEBUG nova.virt.hardware [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Flavor pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1562.549080] env[61649]: DEBUG nova.virt.hardware [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Image pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1562.549222] env[61649]: DEBUG nova.virt.hardware [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1562.549450] env[61649]: DEBUG nova.virt.hardware [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1562.549625] env[61649]: DEBUG nova.virt.hardware [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1562.549795] env[61649]: DEBUG 
nova.virt.hardware [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Got 1 possible topologies {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1562.549959] env[61649]: DEBUG nova.virt.hardware [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1562.550131] env[61649]: DEBUG nova.virt.hardware [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1562.551206] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77d1cdaa-978b-41fc-a384-fa140d7f2e4b {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.558735] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cc550e8-6438-4058-9129-2569b7f6b410 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.813617] env[61649]: DEBUG nova.network.neutron [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Successfully created port: 6299e29e-ce41-4787-bf5e-d66f141025ea {{(pid=61649) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1563.613541] env[61649]: DEBUG nova.network.neutron [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Successfully updated port: 6299e29e-ce41-4787-bf5e-d66f141025ea {{(pid=61649) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1563.614045] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg e1d4af2fdf154c388c8e266be410ea41 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1563.621791] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e1d4af2fdf154c388c8e266be410ea41 [ 1563.622466] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Acquiring lock "refresh_cache-4661732c-51dc-4a77-aa32-28049dbd5ad7" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1563.622608] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Acquired lock "refresh_cache-4661732c-51dc-4a77-aa32-28049dbd5ad7" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 
1563.622752] env[61649]: DEBUG nova.network.neutron [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Building network info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1563.623126] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg fd84316b2b2c4c43a1681741c89b2966 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1563.629735] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fd84316b2b2c4c43a1681741c89b2966 [ 1563.662166] env[61649]: DEBUG nova.network.neutron [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Instance cache missing network info. {{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1563.798808] env[61649]: DEBUG nova.network.neutron [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Updating instance_info_cache with network_info: [{"id": "6299e29e-ce41-4787-bf5e-d66f141025ea", "address": "fa:16:3e:a0:1f:74", "network": {"id": "7ccb9efc-b204-4b68-b0ee-59dd352de539", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-398085553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a732894bf424b5e9e3e972af47a7314", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db068f71-08cc-42d4-8ab6-17134c1585e5", "external-id": "nsx-vlan-transportzone-721", "segmentation_id": 721, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6299e29e-ce", "ovs_interfaceid": "6299e29e-ce41-4787-bf5e-d66f141025ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1563.799309] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 0cb8683793b34039b7d0c3b8e6b6833d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1563.809081] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0cb8683793b34039b7d0c3b8e6b6833d [ 1563.809688] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Releasing lock "refresh_cache-4661732c-51dc-4a77-aa32-28049dbd5ad7" {{(pid=61649) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1563.809968] env[61649]: DEBUG nova.compute.manager [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Instance network_info: |[{"id": "6299e29e-ce41-4787-bf5e-d66f141025ea", "address": "fa:16:3e:a0:1f:74", "network": {"id": "7ccb9efc-b204-4b68-b0ee-59dd352de539", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-398085553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a732894bf424b5e9e3e972af47a7314", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db068f71-08cc-42d4-8ab6-17134c1585e5", "external-id": "nsx-vlan-transportzone-721", "segmentation_id": 721, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6299e29e-ce", "ovs_interfaceid": "6299e29e-ce41-4787-bf5e-d66f141025ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1563.810343] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a0:1f:74', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'db068f71-08cc-42d4-8ab6-17134c1585e5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6299e29e-ce41-4787-bf5e-d66f141025ea', 'vif_model': 'vmxnet3'}] {{(pid=61649) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1563.817879] env[61649]: DEBUG oslo.service.loopingcall [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1563.818322] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Creating VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1563.818538] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3d9fb25a-53e6-4d40-9c42-3ee31b585af8 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.839531] env[61649]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1563.839531] env[61649]: value = "task-158255" [ 1563.839531] env[61649]: _type = "Task" [ 1563.839531] env[61649]: } to complete. 
{{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1563.847575] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158255, 'name': CreateVM_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.989755] env[61649]: DEBUG nova.compute.manager [req-184eb016-449f-4327-a44c-a5060bf9cd6c req-94a013a9-9bd5-45aa-b60a-35e4f126ec33 service nova] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Received event network-vif-plugged-6299e29e-ce41-4787-bf5e-d66f141025ea {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1563.989997] env[61649]: DEBUG oslo_concurrency.lockutils [req-184eb016-449f-4327-a44c-a5060bf9cd6c req-94a013a9-9bd5-45aa-b60a-35e4f126ec33 service nova] Acquiring lock "4661732c-51dc-4a77-aa32-28049dbd5ad7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1563.990234] env[61649]: DEBUG oslo_concurrency.lockutils [req-184eb016-449f-4327-a44c-a5060bf9cd6c req-94a013a9-9bd5-45aa-b60a-35e4f126ec33 service nova] Lock "4661732c-51dc-4a77-aa32-28049dbd5ad7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1563.990435] env[61649]: DEBUG oslo_concurrency.lockutils [req-184eb016-449f-4327-a44c-a5060bf9cd6c req-94a013a9-9bd5-45aa-b60a-35e4f126ec33 service nova] Lock "4661732c-51dc-4a77-aa32-28049dbd5ad7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1563.990638] env[61649]: DEBUG nova.compute.manager [req-184eb016-449f-4327-a44c-a5060bf9cd6c req-94a013a9-9bd5-45aa-b60a-35e4f126ec33 service nova] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] No waiting events found dispatching network-vif-plugged-6299e29e-ce41-4787-bf5e-d66f141025ea {{(pid=61649) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1563.990838] env[61649]: WARNING nova.compute.manager [req-184eb016-449f-4327-a44c-a5060bf9cd6c req-94a013a9-9bd5-45aa-b60a-35e4f126ec33 service nova] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Received unexpected event network-vif-plugged-6299e29e-ce41-4787-bf5e-d66f141025ea for instance with vm_state building and task_state spawning. [ 1563.991026] env[61649]: DEBUG nova.compute.manager [req-184eb016-449f-4327-a44c-a5060bf9cd6c req-94a013a9-9bd5-45aa-b60a-35e4f126ec33 service nova] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Received event network-changed-6299e29e-ce41-4787-bf5e-d66f141025ea {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1563.991249] env[61649]: DEBUG nova.compute.manager [req-184eb016-449f-4327-a44c-a5060bf9cd6c req-94a013a9-9bd5-45aa-b60a-35e4f126ec33 service nova] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Refreshing instance network info cache due to event network-changed-6299e29e-ce41-4787-bf5e-d66f141025ea. 
{{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 1563.991503] env[61649]: DEBUG oslo_concurrency.lockutils [req-184eb016-449f-4327-a44c-a5060bf9cd6c req-94a013a9-9bd5-45aa-b60a-35e4f126ec33 service nova] Acquiring lock "refresh_cache-4661732c-51dc-4a77-aa32-28049dbd5ad7" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1563.991639] env[61649]: DEBUG oslo_concurrency.lockutils [req-184eb016-449f-4327-a44c-a5060bf9cd6c req-94a013a9-9bd5-45aa-b60a-35e4f126ec33 service nova] Acquired lock "refresh_cache-4661732c-51dc-4a77-aa32-28049dbd5ad7" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1563.991797] env[61649]: DEBUG nova.network.neutron [req-184eb016-449f-4327-a44c-a5060bf9cd6c req-94a013a9-9bd5-45aa-b60a-35e4f126ec33 service nova] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Refreshing network info cache for port 6299e29e-ce41-4787-bf5e-d66f141025ea {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 1563.992323] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-184eb016-449f-4327-a44c-a5060bf9cd6c req-94a013a9-9bd5-45aa-b60a-35e4f126ec33 service nova] Expecting reply to msg d28c286503ea4b208500fa0980c05c32 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1563.999299] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d28c286503ea4b208500fa0980c05c32 [ 1564.230583] env[61649]: DEBUG nova.network.neutron [req-184eb016-449f-4327-a44c-a5060bf9cd6c req-94a013a9-9bd5-45aa-b60a-35e4f126ec33 service nova] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Updated VIF entry in instance network info cache for port 6299e29e-ce41-4787-bf5e-d66f141025ea. 
{{(pid=61649) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 1564.230936] env[61649]: DEBUG nova.network.neutron [req-184eb016-449f-4327-a44c-a5060bf9cd6c req-94a013a9-9bd5-45aa-b60a-35e4f126ec33 service nova] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Updating instance_info_cache with network_info: [{"id": "6299e29e-ce41-4787-bf5e-d66f141025ea", "address": "fa:16:3e:a0:1f:74", "network": {"id": "7ccb9efc-b204-4b68-b0ee-59dd352de539", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-398085553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a732894bf424b5e9e3e972af47a7314", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db068f71-08cc-42d4-8ab6-17134c1585e5", "external-id": "nsx-vlan-transportzone-721", "segmentation_id": 721, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6299e29e-ce", "ovs_interfaceid": "6299e29e-ce41-4787-bf5e-d66f141025ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1564.231439] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-184eb016-449f-4327-a44c-a5060bf9cd6c req-94a013a9-9bd5-45aa-b60a-35e4f126ec33 service nova] Expecting reply to msg 5cf2b81ee105473582d45e60143c7496 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1564.240624] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5cf2b81ee105473582d45e60143c7496 [ 1564.241223] env[61649]: DEBUG oslo_concurrency.lockutils [req-184eb016-449f-4327-a44c-a5060bf9cd6c req-94a013a9-9bd5-45aa-b60a-35e4f126ec33 service nova] Releasing lock "refresh_cache-4661732c-51dc-4a77-aa32-28049dbd5ad7" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1564.349950] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158255, 'name': CreateVM_Task, 'duration_secs': 0.28014} completed successfully. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1564.350111] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Created VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1564.350822] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1564.350986] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1564.351298] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1564.351538] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-44f3216f-a38e-4ebb-8750-28cef71f9278 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.355767] env[61649]: DEBUG oslo_vmware.api [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Waiting for the task: (returnval){ [ 1564.355767] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52965f52-8a2c-de2f-c209-976d436137e8" [ 1564.355767] env[61649]: _type = "Task" [ 1564.355767] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1564.366067] env[61649]: DEBUG oslo_vmware.api [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52965f52-8a2c-de2f-c209-976d436137e8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.865444] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1564.865816] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Processing image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1564.865886] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1579.538667] env[61649]: DEBUG oslo_concurrency.lockutils [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Acquiring lock "28a3b287-8717-42d5-989a-4f66642134f7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1579.538993] env[61649]: DEBUG oslo_concurrency.lockutils [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Lock "28a3b287-8717-42d5-989a-4f66642134f7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1610.222392] env[61649]: WARNING oslo_vmware.rw_handles [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1610.222392] env[61649]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1610.222392] env[61649]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1610.222392] env[61649]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1610.222392] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1610.222392] env[61649]: ERROR oslo_vmware.rw_handles response.begin() [ 1610.222392] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1610.222392] env[61649]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1610.222392] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1610.222392] env[61649]: ERROR 
oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1610.222392] env[61649]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1610.222392] env[61649]: ERROR oslo_vmware.rw_handles [ 1610.223053] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Downloaded image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to vmware_temp/a781640a-e0f0-4c64-9c93-6697eafb0881/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1610.224923] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Caching image {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1610.225171] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Copying Virtual Disk [datastore1] vmware_temp/a781640a-e0f0-4c64-9c93-6697eafb0881/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk to [datastore1] vmware_temp/a781640a-e0f0-4c64-9c93-6697eafb0881/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk {{(pid=61649) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1610.225461] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-067d3dd2-449f-4ad4-8739-a63cb0bea506 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.233329] env[61649]: DEBUG oslo_vmware.api [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Waiting for the task: (returnval){ [ 1610.233329] env[61649]: value = "task-158256" [ 1610.233329] env[61649]: _type = "Task" [ 1610.233329] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1610.240970] env[61649]: DEBUG oslo_vmware.api [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Task: {'id': task-158256, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.743667] env[61649]: DEBUG oslo_vmware.exceptions [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Fault InvalidArgument not matched. 
{{(pid=61649) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1610.744068] env[61649]: DEBUG oslo_concurrency.lockutils [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1610.744642] env[61649]: ERROR nova.compute.manager [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1610.744642] env[61649]: Faults: ['InvalidArgument'] [ 1610.744642] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Traceback (most recent call last): [ 1610.744642] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1610.744642] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] yield resources [ 1610.744642] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1610.744642] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] self.driver.spawn(context, instance, image_meta, [ 1610.744642] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1610.744642] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1610.744642] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1610.744642] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] self._fetch_image_if_missing(context, vi) [ 1610.744642] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1610.744990] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] image_cache(vi, tmp_image_ds_loc) [ 1610.744990] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1610.744990] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] vm_util.copy_virtual_disk( [ 1610.744990] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1610.744990] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] session._wait_for_task(vmdk_copy_task) [ 1610.744990] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1610.744990] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] return self.wait_for_task(task_ref) [ 1610.744990] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1610.744990] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] return evt.wait() [ 1610.744990] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1610.744990] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] result = hub.switch() [ 1610.744990] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1610.744990] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] return self.greenlet.switch() [ 1610.745332] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1610.745332] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] self.f(*self.args, **self.kw) [ 1610.745332] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1610.745332] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] raise exceptions.translate_fault(task_info.error) [ 1610.745332] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1610.745332] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Faults: ['InvalidArgument'] [ 1610.745332] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] [ 1610.745332] env[61649]: INFO nova.compute.manager [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Terminating instance [ 1610.746524] env[61649]: DEBUG oslo_concurrency.lockutils [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1610.746731] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1610.746972] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2f6aee38-416b-4549-968d-4ae4baf2027d 
{{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.749372] env[61649]: DEBUG nova.compute.manager [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1610.749639] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1610.750395] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c32821eb-7244-4bac-881c-bd0647e31ffc {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.756808] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Unregistering the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1610.757013] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3777e3aa-0db8-4597-ac34-2597e20593ca {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.759851] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1610.760073] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61649) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1610.761045] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b835b2d4-aa77-4846-9e07-edb917a69fcb {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.766007] env[61649]: DEBUG oslo_vmware.api [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Waiting for the task: (returnval){ [ 1610.766007] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]522bc3cc-9259-00c3-3770-839acf61fb8d" [ 1610.766007] env[61649]: _type = "Task" [ 1610.766007] env[61649]: } to complete. 
{{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1610.772778] env[61649]: DEBUG oslo_vmware.api [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]522bc3cc-9259-00c3-3770-839acf61fb8d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.821455] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Unregistered the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1610.821686] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Deleting contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1610.821869] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Deleting the datastore file [datastore1] 0b0050ff-2714-4068-9956-089c6aa3eff1 {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1610.822127] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-07ba275e-4882-4d1e-a22a-16931a2ef0fe {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.828913] env[61649]: DEBUG oslo_vmware.api [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Waiting for the task: (returnval){ [ 1610.828913] env[61649]: value = "task-158258" [ 1610.828913] env[61649]: _type = "Task" [ 1610.828913] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1610.836850] env[61649]: DEBUG oslo_vmware.api [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Task: {'id': task-158258, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.983682] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 10fdcde7bbd74671a5dfc0ca7265b874 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1610.993211] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 10fdcde7bbd74671a5dfc0ca7265b874 [ 1611.275613] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Preparing fetch location {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1611.275963] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Creating directory with path [datastore1] vmware_temp/6e144e88-66aa-4f56-8e55-24c7d76416c7/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1611.276110] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-44c8d2ce-4ada-488f-a2a0-099e5fe171f1 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.286852] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Created directory with path [datastore1] vmware_temp/6e144e88-66aa-4f56-8e55-24c7d76416c7/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1611.287037] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Fetch image to [datastore1] vmware_temp/6e144e88-66aa-4f56-8e55-24c7d76416c7/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1611.287205] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to [datastore1] vmware_temp/6e144e88-66aa-4f56-8e55-24c7d76416c7/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1611.287893] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-156df9de-72cd-4645-9ac5-cf4b24ec34b5 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.294278] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60df0fa2-11db-4201-bb2d-3b8e242b3c46 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.302880] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-fddc72e1-9217-4904-bf37-9932f1195c5e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.335826] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78025f48-501a-42b0-8853-3e7a2d4df988 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.342442] env[61649]: DEBUG oslo_vmware.api [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Task: {'id': task-158258, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.075603} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1611.343817] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Deleted the datastore file {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1611.344020] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Deleted contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1611.344207] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1611.344381] env[61649]: INFO nova.compute.manager [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Took 0.59 seconds to destroy the instance on the hypervisor. 
[ 1611.346091] env[61649]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-888c610e-b486-4275-9a7f-8ccfdb4e3652 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.347900] env[61649]: DEBUG nova.compute.claims [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Aborting claim: {{(pid=61649) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1611.348088] env[61649]: DEBUG oslo_concurrency.lockutils [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1611.348305] env[61649]: DEBUG oslo_concurrency.lockutils [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1611.350127] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 8e7023d9115f4ce1b279b33aa73f8cca in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1611.375889] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1611.383294] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8e7023d9115f4ce1b279b33aa73f8cca [ 1611.424299] env[61649]: DEBUG oslo_vmware.rw_handles [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6e144e88-66aa-4f56-8e55-24c7d76416c7/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1611.485465] env[61649]: DEBUG oslo_vmware.rw_handles [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Completed reading data from the image iterator. 
{{(pid=61649) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1611.485869] env[61649]: DEBUG oslo_vmware.rw_handles [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6e144e88-66aa-4f56-8e55-24c7d76416c7/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1611.581210] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13dbcd66-e0dc-4f0e-917c-b2cec4ab8105 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.588793] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cbc159b-fbde-4f58-852e-d69e4f51e8ff {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.617481] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b2de2c2-5f7f-4f24-ab9d-aacc5a16c98a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.623949] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dec93fa-9a3b-4d7c-881c-618545e5e5dc {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.636244] env[61649]: DEBUG nova.compute.provider_tree [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1611.636713] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg e8c8e1f011034875b10e6ed2516b60b7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1611.644166] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e8c8e1f011034875b10e6ed2516b60b7 [ 1611.645041] env[61649]: DEBUG nova.scheduler.client.report [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1611.647187] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 
4ed5b8a5fe2544b0ab3f1d6099a3fad9 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1611.657492] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4ed5b8a5fe2544b0ab3f1d6099a3fad9 [ 1611.658127] env[61649]: DEBUG oslo_concurrency.lockutils [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.310s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1611.658630] env[61649]: ERROR nova.compute.manager [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1611.658630] env[61649]: Faults: ['InvalidArgument'] [ 1611.658630] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Traceback (most recent call last): [ 1611.658630] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1611.658630] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] self.driver.spawn(context, instance, image_meta, [ 1611.658630] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1611.658630] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1611.658630] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1611.658630] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] self._fetch_image_if_missing(context, vi) [ 1611.658630] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1611.658630] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] image_cache(vi, tmp_image_ds_loc) [ 1611.658630] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1611.658982] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] vm_util.copy_virtual_disk( [ 1611.658982] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1611.658982] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] session._wait_for_task(vmdk_copy_task) [ 1611.658982] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1611.658982] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] return self.wait_for_task(task_ref) [ 1611.658982] 
env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1611.658982] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] return evt.wait() [ 1611.658982] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1611.658982] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] result = hub.switch() [ 1611.658982] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1611.658982] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] return self.greenlet.switch() [ 1611.658982] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1611.658982] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] self.f(*self.args, **self.kw) [ 1611.659351] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1611.659351] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] raise exceptions.translate_fault(task_info.error) [ 1611.659351] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1611.659351] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Faults: ['InvalidArgument'] [ 1611.659351] env[61649]: ERROR nova.compute.manager [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] [ 1611.659351] env[61649]: DEBUG nova.compute.utils [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] VimFaultException {{(pid=61649) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1611.660703] env[61649]: DEBUG nova.compute.manager [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Build of instance 0b0050ff-2714-4068-9956-089c6aa3eff1 was re-scheduled: A specified parameter was not correct: fileType [ 1611.660703] env[61649]: Faults: ['InvalidArgument'] {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1611.661071] env[61649]: DEBUG nova.compute.manager [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Unplugging VIFs for instance {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1611.661246] env[61649]: DEBUG nova.compute.manager [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Virt 
driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1611.661428] env[61649]: DEBUG nova.compute.manager [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1611.661593] env[61649]: DEBUG nova.network.neutron [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1611.876429] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 2b83a447bab149dcadfd044f8c17a8e6 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1611.883590] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2b83a447bab149dcadfd044f8c17a8e6 [ 1611.887947] env[61649]: DEBUG nova.network.neutron [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1611.888156] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg b04b55d09bef4705873dbeb6f147721f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1611.897340] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b04b55d09bef4705873dbeb6f147721f [ 1611.897965] env[61649]: INFO nova.compute.manager [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Took 0.24 seconds to deallocate network for instance. 
[ 1611.899698] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg e47a665150834b0893d55ef22ce4648c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1611.930727] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e47a665150834b0893d55ef22ce4648c [ 1611.933467] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 856a4c3bf90548c594ccf0caab998cd5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1611.966158] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 856a4c3bf90548c594ccf0caab998cd5 [ 1611.985827] env[61649]: INFO nova.scheduler.client.report [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Deleted allocations for instance 0b0050ff-2714-4068-9956-089c6aa3eff1 [ 1611.991795] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 7553fe516be9410b9fdf4d43d8ecade6 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1612.001157] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7553fe516be9410b9fdf4d43d8ecade6 [ 1612.001711] env[61649]: DEBUG oslo_concurrency.lockutils [None req-93791ee8-4e4a-4627-bf39-0faa6eedd872 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Lock "0b0050ff-2714-4068-9956-089c6aa3eff1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 582.685s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1612.002262] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Expecting reply to msg c3b3eba9d20648e196f312b897a3ddc8 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1612.002948] env[61649]: DEBUG oslo_concurrency.lockutils [None req-262a3c93-167f-405d-b13f-7c829fb2bd03 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Lock "0b0050ff-2714-4068-9956-089c6aa3eff1" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 385.845s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1612.003171] env[61649]: DEBUG oslo_concurrency.lockutils [None req-262a3c93-167f-405d-b13f-7c829fb2bd03 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Acquiring lock "0b0050ff-2714-4068-9956-089c6aa3eff1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1612.003363] env[61649]: DEBUG oslo_concurrency.lockutils [None req-262a3c93-167f-405d-b13f-7c829fb2bd03 tempest-ServerDiskConfigTestJSON-783193802 
tempest-ServerDiskConfigTestJSON-783193802-project-member] Lock "0b0050ff-2714-4068-9956-089c6aa3eff1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1612.003529] env[61649]: DEBUG oslo_concurrency.lockutils [None req-262a3c93-167f-405d-b13f-7c829fb2bd03 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Lock "0b0050ff-2714-4068-9956-089c6aa3eff1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1612.005998] env[61649]: INFO nova.compute.manager [None req-262a3c93-167f-405d-b13f-7c829fb2bd03 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Terminating instance [ 1612.007804] env[61649]: DEBUG nova.compute.manager [None req-262a3c93-167f-405d-b13f-7c829fb2bd03 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1612.009029] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-262a3c93-167f-405d-b13f-7c829fb2bd03 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1612.009029] env[61649]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ade3f54b-fe94-4cb9-b41e-f6c36c2356a9 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.018269] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e36e08b-fdbb-4e81-a639-39f7922b7519 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.029363] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c3b3eba9d20648e196f312b897a3ddc8 [ 1612.029919] env[61649]: DEBUG nova.compute.manager [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Starting instance... 
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1612.031618] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Expecting reply to msg ca8d26e289ea4122ae093b9bcf0f06b8 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1612.051603] env[61649]: WARNING nova.virt.vmwareapi.vmops [None req-262a3c93-167f-405d-b13f-7c829fb2bd03 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0b0050ff-2714-4068-9956-089c6aa3eff1 could not be found. [ 1612.051815] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-262a3c93-167f-405d-b13f-7c829fb2bd03 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1612.051997] env[61649]: INFO nova.compute.manager [None req-262a3c93-167f-405d-b13f-7c829fb2bd03 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1612.052569] env[61649]: DEBUG oslo.service.loopingcall [None req-262a3c93-167f-405d-b13f-7c829fb2bd03 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1612.052826] env[61649]: DEBUG nova.compute.manager [-] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1612.052928] env[61649]: DEBUG nova.network.neutron [-] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1612.062918] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ca8d26e289ea4122ae093b9bcf0f06b8 [ 1612.078464] env[61649]: DEBUG oslo_concurrency.lockutils [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1612.078666] env[61649]: DEBUG oslo_concurrency.lockutils [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1612.080174] env[61649]: INFO nova.compute.claims [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1612.081738] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Expecting reply to msg 1210aaa0a9f249ecb58247333d177606 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1612.100653] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 01f383f15b8040519a61ec213ee8eec2 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1612.107219] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 01f383f15b8040519a61ec213ee8eec2 [ 1612.107219] env[61649]: DEBUG nova.network.neutron [-] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1612.107537] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 967029c478f84cf985061a7d2f1ef7b6 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1612.117205] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 967029c478f84cf985061a7d2f1ef7b6 [ 1612.117624] env[61649]: INFO nova.compute.manager [-] [instance: 0b0050ff-2714-4068-9956-089c6aa3eff1] Took 0.06 seconds to deallocate network for instance. 
[ 1612.121362] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-262a3c93-167f-405d-b13f-7c829fb2bd03 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg c4beaf5aa447471fad4e3bf84b08b6da in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1612.127985] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1210aaa0a9f249ecb58247333d177606 [ 1612.130048] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Expecting reply to msg 40b01bffec234fbd97cccf5f4dce95ed in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1612.136707] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 40b01bffec234fbd97cccf5f4dce95ed [ 1612.146354] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c4beaf5aa447471fad4e3bf84b08b6da [ 1612.160196] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-262a3c93-167f-405d-b13f-7c829fb2bd03 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg d64132a3c0da4e6dad38ac9db7e8dca7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1612.197526] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d64132a3c0da4e6dad38ac9db7e8dca7 [ 1612.202667] env[61649]: DEBUG oslo_concurrency.lockutils [None req-262a3c93-167f-405d-b13f-7c829fb2bd03 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Lock "0b0050ff-2714-4068-9956-089c6aa3eff1" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.200s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1612.202996] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-262a3c93-167f-405d-b13f-7c829fb2bd03 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 3768819b8f8647efa7b376e3ea981c0c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1612.215845] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3768819b8f8647efa7b376e3ea981c0c [ 1612.285307] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cfb71ed-0587-449f-acc7-cb163c8ea580 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.292602] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b3918e7-ef6b-425e-8f88-647ca1e285e4 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.322318] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72ed8456-8d84-4dd3-ba43-53a1f8c006f0 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.329233] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38304331-5216-416a-a145-4753ac8e3217 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.341636] env[61649]: 
DEBUG nova.compute.provider_tree [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1612.342129] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Expecting reply to msg 5465e515636849819aafc6ed206691b5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1612.349652] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5465e515636849819aafc6ed206691b5 [ 1612.350510] env[61649]: DEBUG nova.scheduler.client.report [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1612.352699] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Expecting reply to msg 017982bc8a3d4a9abe918e765231e31a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1612.364861] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 017982bc8a3d4a9abe918e765231e31a [ 1612.365965] env[61649]: DEBUG oslo_concurrency.lockutils [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.287s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1612.366434] env[61649]: DEBUG nova.compute.manager [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Start building networks asynchronously for instance. 
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1612.368021] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Expecting reply to msg 4ea4042e7a3a48c18637d217cf7fb108 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1612.379981] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1612.395560] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4ea4042e7a3a48c18637d217cf7fb108 [ 1612.397012] env[61649]: DEBUG nova.compute.utils [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Using /dev/sd instead of None {{(pid=61649) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1612.397674] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Expecting reply to msg 622c0b0f5a794152b30cd6165d809aa6 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1612.398811] env[61649]: DEBUG nova.compute.manager [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Allocating IP information in the background. {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1612.399007] env[61649]: DEBUG nova.network.neutron [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] allocate_for_instance() {{(pid=61649) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1612.405918] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 622c0b0f5a794152b30cd6165d809aa6 [ 1612.406397] env[61649]: DEBUG nova.compute.manager [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Start building block device mappings for instance. 
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1612.407913] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Expecting reply to msg 7d71db04361744f7b5319542d97984b0 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1612.440024] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7d71db04361744f7b5319542d97984b0 [ 1612.442627] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Expecting reply to msg 6dec40329d094bc38a98d0106e81b9d9 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1612.445858] env[61649]: DEBUG nova.policy [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '36a6c4449bd54e3ba357d883177212d2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e66a5f940c93453a92ed2df649f69d21', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61649) authorize /opt/stack/nova/nova/policy.py:203}} [ 1612.471677] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6dec40329d094bc38a98d0106e81b9d9 [ 1612.472711] env[61649]: DEBUG nova.compute.manager [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Start spawning the instance on the hypervisor. 
{{(pid=61649) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1612.493768] env[61649]: DEBUG nova.virt.hardware [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-04-02T10:03:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-04-02T10:03:42Z,direct_url=,disk_format='vmdk',id=d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d9c1bd4c77004c3cb8e42232cad1896c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-04-02T10:03:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1612.493993] env[61649]: DEBUG nova.virt.hardware [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Flavor limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1612.494144] env[61649]: DEBUG nova.virt.hardware [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Image limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1612.494319] env[61649]: DEBUG nova.virt.hardware [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Flavor pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1612.494467] env[61649]: DEBUG nova.virt.hardware [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Image pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1612.494610] env[61649]: DEBUG nova.virt.hardware [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1612.494810] env[61649]: DEBUG nova.virt.hardware [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1612.494961] env[61649]: DEBUG nova.virt.hardware [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1612.495122] env[61649]: DEBUG nova.virt.hardware [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Got 1 possible topologies {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1612.495296] env[61649]: DEBUG nova.virt.hardware [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1612.495465] env[61649]: DEBUG nova.virt.hardware [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1612.496518] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7be6ff4f-5e5a-41d7-8f80-b7f563699127 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.505684] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb131385-23be-4398-bc42-a53d2bc76afc {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.756146] env[61649]: DEBUG nova.network.neutron [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Successfully created port: 93a8b6ae-69d6-49e7-8d77-d3d03cec0813 {{(pid=61649) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1613.384800] env[61649]: DEBUG nova.network.neutron [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Successfully updated port: 93a8b6ae-69d6-49e7-8d77-d3d03cec0813 {{(pid=61649) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1613.385388] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Expecting reply to msg ad9337e1d2b4478baf5d53a90995ca7b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1613.393757] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ad9337e1d2b4478baf5d53a90995ca7b [ 1613.394470] env[61649]: DEBUG oslo_concurrency.lockutils [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Acquiring lock "refresh_cache-545947a4-3f1a-44fe-ac02-ec5e2e5844d5" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1613.394550] env[61649]: DEBUG oslo_concurrency.lockutils [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 
tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Acquired lock "refresh_cache-545947a4-3f1a-44fe-ac02-ec5e2e5844d5" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1613.394685] env[61649]: DEBUG nova.network.neutron [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Building network info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1613.395077] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Expecting reply to msg 5863f5660d71463da3864d27fb7e81b1 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1613.402022] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5863f5660d71463da3864d27fb7e81b1 [ 1613.432084] env[61649]: DEBUG nova.network.neutron [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Instance cache missing network info. {{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1613.746945] env[61649]: DEBUG nova.network.neutron [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Updating instance_info_cache with network_info: [{"id": "93a8b6ae-69d6-49e7-8d77-d3d03cec0813", "address": "fa:16:3e:e7:43:e9", "network": {"id": "8b157287-6a44-457e-b931-3a009c74cd4c", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-574121636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e66a5f940c93453a92ed2df649f69d21", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7ab8d568-adb0-4f3b-b6cc-68413e6546ae", "external-id": "nsx-vlan-transportzone-86", "segmentation_id": 86, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93a8b6ae-69", "ovs_interfaceid": "93a8b6ae-69d6-49e7-8d77-d3d03cec0813", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1613.747454] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Expecting reply to msg 676570bc4c3a42999a52eed2a4830351 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1613.759867] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 676570bc4c3a42999a52eed2a4830351 [ 
1613.760450] env[61649]: DEBUG oslo_concurrency.lockutils [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Releasing lock "refresh_cache-545947a4-3f1a-44fe-ac02-ec5e2e5844d5" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1613.760764] env[61649]: DEBUG nova.compute.manager [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Instance network_info: |[{"id": "93a8b6ae-69d6-49e7-8d77-d3d03cec0813", "address": "fa:16:3e:e7:43:e9", "network": {"id": "8b157287-6a44-457e-b931-3a009c74cd4c", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-574121636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e66a5f940c93453a92ed2df649f69d21", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7ab8d568-adb0-4f3b-b6cc-68413e6546ae", "external-id": "nsx-vlan-transportzone-86", "segmentation_id": 86, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93a8b6ae-69", "ovs_interfaceid": "93a8b6ae-69d6-49e7-8d77-d3d03cec0813", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1613.761260] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e7:43:e9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7ab8d568-adb0-4f3b-b6cc-68413e6546ae', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '93a8b6ae-69d6-49e7-8d77-d3d03cec0813', 'vif_model': 'vmxnet3'}] {{(pid=61649) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1613.768895] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Creating folder: Project (e66a5f940c93453a92ed2df649f69d21). Parent ref: group-v51588. 
{{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1613.769398] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-27d9545a-4a93-4e99-9287-a6001f7e5539 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.783394] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Created folder: Project (e66a5f940c93453a92ed2df649f69d21) in parent group-v51588. [ 1613.783678] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Creating folder: Instances. Parent ref: group-v51685. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1613.783918] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0bd4e98f-0d10-4861-9f31-1bcf767b6b70 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.792192] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Created folder: Instances in parent group-v51685. [ 1613.792410] env[61649]: DEBUG oslo.service.loopingcall [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1613.792584] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Creating VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1613.792767] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1aba8821-d6d2-44d2-ac1f-914483dd681c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.810357] env[61649]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1613.810357] env[61649]: value = "task-158261" [ 1613.810357] env[61649]: _type = "Task" [ 1613.810357] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1613.817693] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158261, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.952982] env[61649]: DEBUG nova.compute.manager [req-ab57a855-fabe-42d1-a985-98902727047f req-536eb56a-24b4-48d7-a1cb-e320d1af7e43 service nova] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Received event network-vif-plugged-93a8b6ae-69d6-49e7-8d77-d3d03cec0813 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1613.953200] env[61649]: DEBUG oslo_concurrency.lockutils [req-ab57a855-fabe-42d1-a985-98902727047f req-536eb56a-24b4-48d7-a1cb-e320d1af7e43 service nova] Acquiring lock "545947a4-3f1a-44fe-ac02-ec5e2e5844d5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1613.953533] env[61649]: DEBUG oslo_concurrency.lockutils [req-ab57a855-fabe-42d1-a985-98902727047f req-536eb56a-24b4-48d7-a1cb-e320d1af7e43 service nova] Lock "545947a4-3f1a-44fe-ac02-ec5e2e5844d5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1613.953732] env[61649]: DEBUG oslo_concurrency.lockutils [req-ab57a855-fabe-42d1-a985-98902727047f req-536eb56a-24b4-48d7-a1cb-e320d1af7e43 service nova] Lock "545947a4-3f1a-44fe-ac02-ec5e2e5844d5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1613.953902] env[61649]: DEBUG nova.compute.manager [req-ab57a855-fabe-42d1-a985-98902727047f req-536eb56a-24b4-48d7-a1cb-e320d1af7e43 service nova] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] No waiting events found dispatching network-vif-plugged-93a8b6ae-69d6-49e7-8d77-d3d03cec0813 {{(pid=61649) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1613.954071] env[61649]: WARNING nova.compute.manager [req-ab57a855-fabe-42d1-a985-98902727047f req-536eb56a-24b4-48d7-a1cb-e320d1af7e43 service nova] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Received unexpected event network-vif-plugged-93a8b6ae-69d6-49e7-8d77-d3d03cec0813 for instance with vm_state building and task_state spawning. [ 1613.954230] env[61649]: DEBUG nova.compute.manager [req-ab57a855-fabe-42d1-a985-98902727047f req-536eb56a-24b4-48d7-a1cb-e320d1af7e43 service nova] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Received event network-changed-93a8b6ae-69d6-49e7-8d77-d3d03cec0813 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1613.954385] env[61649]: DEBUG nova.compute.manager [req-ab57a855-fabe-42d1-a985-98902727047f req-536eb56a-24b4-48d7-a1cb-e320d1af7e43 service nova] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Refreshing instance network info cache due to event network-changed-93a8b6ae-69d6-49e7-8d77-d3d03cec0813. 
{{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 1613.954570] env[61649]: DEBUG oslo_concurrency.lockutils [req-ab57a855-fabe-42d1-a985-98902727047f req-536eb56a-24b4-48d7-a1cb-e320d1af7e43 service nova] Acquiring lock "refresh_cache-545947a4-3f1a-44fe-ac02-ec5e2e5844d5" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1613.954705] env[61649]: DEBUG oslo_concurrency.lockutils [req-ab57a855-fabe-42d1-a985-98902727047f req-536eb56a-24b4-48d7-a1cb-e320d1af7e43 service nova] Acquired lock "refresh_cache-545947a4-3f1a-44fe-ac02-ec5e2e5844d5" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1613.954858] env[61649]: DEBUG nova.network.neutron [req-ab57a855-fabe-42d1-a985-98902727047f req-536eb56a-24b4-48d7-a1cb-e320d1af7e43 service nova] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Refreshing network info cache for port 93a8b6ae-69d6-49e7-8d77-d3d03cec0813 {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 1613.955337] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-ab57a855-fabe-42d1-a985-98902727047f req-536eb56a-24b4-48d7-a1cb-e320d1af7e43 service nova] Expecting reply to msg cf693f15c3e74af9b93a262b11bee42e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1613.962821] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cf693f15c3e74af9b93a262b11bee42e [ 1614.249558] env[61649]: DEBUG nova.network.neutron [req-ab57a855-fabe-42d1-a985-98902727047f req-536eb56a-24b4-48d7-a1cb-e320d1af7e43 service nova] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Updated VIF entry in instance network info cache for port 93a8b6ae-69d6-49e7-8d77-d3d03cec0813. 
{{(pid=61649) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 1614.249967] env[61649]: DEBUG nova.network.neutron [req-ab57a855-fabe-42d1-a985-98902727047f req-536eb56a-24b4-48d7-a1cb-e320d1af7e43 service nova] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Updating instance_info_cache with network_info: [{"id": "93a8b6ae-69d6-49e7-8d77-d3d03cec0813", "address": "fa:16:3e:e7:43:e9", "network": {"id": "8b157287-6a44-457e-b931-3a009c74cd4c", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-574121636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e66a5f940c93453a92ed2df649f69d21", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7ab8d568-adb0-4f3b-b6cc-68413e6546ae", "external-id": "nsx-vlan-transportzone-86", "segmentation_id": 86, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93a8b6ae-69", "ovs_interfaceid": "93a8b6ae-69d6-49e7-8d77-d3d03cec0813", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1614.250459] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-ab57a855-fabe-42d1-a985-98902727047f req-536eb56a-24b4-48d7-a1cb-e320d1af7e43 service nova] Expecting reply to msg 0885e37d6560423780237a3af28d6883 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1614.258859] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0885e37d6560423780237a3af28d6883 [ 1614.259477] env[61649]: DEBUG oslo_concurrency.lockutils [req-ab57a855-fabe-42d1-a985-98902727047f req-536eb56a-24b4-48d7-a1cb-e320d1af7e43 service nova] Releasing lock "refresh_cache-545947a4-3f1a-44fe-ac02-ec5e2e5844d5" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1614.320311] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158261, 'name': CreateVM_Task, 'duration_secs': 0.279374} completed successfully. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1614.320490] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Created VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1614.321147] env[61649]: DEBUG oslo_concurrency.lockutils [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1614.321311] env[61649]: DEBUG oslo_concurrency.lockutils [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1614.321746] env[61649]: DEBUG oslo_concurrency.lockutils [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1614.322043] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6325dff7-1f9c-4a68-881f-be4f7952055e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.326722] env[61649]: DEBUG oslo_vmware.api [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Waiting for the task: (returnval){ [ 1614.326722] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]5277d256-5ac4-f1fc-d140-d491290b25d2" [ 1614.326722] env[61649]: _type = "Task" [ 1614.326722] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1614.334703] env[61649]: DEBUG oslo_vmware.api [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]5277d256-5ac4-f1fc-d140-d491290b25d2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.836434] env[61649]: DEBUG oslo_concurrency.lockutils [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1614.836753] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Processing image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1614.836904] env[61649]: DEBUG oslo_concurrency.lockutils [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1614.928593] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1614.928761] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61649) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 1615.929644] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1617.924434] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1617.929084] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1618.834284] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c579601c-ce4f-4631-ac51-e6c58b07739e tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg c1ceb7021cf644079ed461dcac1a7c65 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1618.846173] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c1ceb7021cf644079ed461dcac1a7c65 [ 1618.846676] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c579601c-ce4f-4631-ac51-e6c58b07739e tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Acquiring lock "4661732c-51dc-4a77-aa32-28049dbd5ad7" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1618.929093] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1618.929428] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Starting heal instance info cache {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 1618.929428] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Rebuilding the list of instances to heal {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 1618.930026] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 877224f7fc6c4792962bc9f48f981bc4 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1618.949056] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 877224f7fc6c4792962bc9f48f981bc4 [ 1618.950530] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Skipping network cache update for instance because it is Building. 
{{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1618.950712] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1618.950849] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1618.950978] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1618.951101] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: b6243867-9546-4663-9d48-5c040537490b] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1618.951222] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1618.951342] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1618.951459] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1618.951578] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1618.951695] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1618.951883] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Didn't find any instances for network info cache update. 
{{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 1620.929295] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1620.929666] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1620.929765] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1620.930451] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 9799e26cc37f4048b45fe79a6f710b5c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1620.939641] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9799e26cc37f4048b45fe79a6f710b5c [ 1620.940604] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1620.940810] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1620.940976] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1620.941128] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61649) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1620.942280] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75fddbcb-99c0-45f7-9cf5-db7c23074635 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.951851] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d269ef60-03a7-4fec-ab6b-6eb920e752ad {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.964911] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-501fc86e-6f9d-4a10-8544-fcf3322d2cb3 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.970732] 
env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a77ca28f-de21-40c8-abef-f3843bb6a2fb {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.997987] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181683MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61649) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1620.998129] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1620.998309] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1620.999359] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 181bbb6c8bf04cee98bb0f477d1086b1 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1621.049965] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 181bbb6c8bf04cee98bb0f477d1086b1 [ 1621.054024] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg fe8067ebad8e4b24b0826efbdc45006b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1621.063175] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fe8067ebad8e4b24b0826efbdc45006b [ 1621.080110] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 0fb0aaae-b6d2-418d-81a9-74671f4b97c6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1621.080269] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 5f67180f-6b27-4487-8858-5f57fcffd041 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1621.080402] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance aa39503a-2342-421e-928f-35ec7c8e47fb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1621.080547] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance d6e8f17f-40c4-46e0-a900-d92d1da01ed8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1621.080677] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance b6243867-9546-4663-9d48-5c040537490b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1621.080809] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 4b87e74a-2408-466f-b1c2-68330c31fb9d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1621.080925] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance ff225293-ad72-499a-9b5b-147d0bc40350 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1621.081039] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 5f424618-f9b3-4e9a-898c-2d1a07476cc7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1621.081150] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 4661732c-51dc-4a77-aa32-28049dbd5ad7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1621.081260] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 545947a4-3f1a-44fe-ac02-ec5e2e5844d5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
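The per-instance allocations listed above reconcile exactly with the "Final resource view" reported a few entries below (used_ram=1792MB, used_disk=10GB, used_vcpus=10): ten instances each hold {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1} in placement, and used_ram also counts the 512 MB host reservation visible in the MEMORY_MB inventory data. A quick arithmetic check:

    # Ten instances, each with the placement allocation shown above.
    allocations = [{'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}] * 10
    reserved_host_memory_mb = 512  # 'reserved' in the MEMORY_MB inventory

    used_vcpus = sum(a['VCPU'] for a in allocations)
    used_disk_gb = sum(a['DISK_GB'] for a in allocations)
    used_ram_mb = reserved_host_memory_mb + sum(a['MEMORY_MB']
                                                for a in allocations)

    # Matches "Final resource view: ... used_ram=1792MB ... used_disk=10GB
    # ... used_vcpus=10" below.
    assert (used_vcpus, used_ram_mb, used_disk_gb) == (10, 1792, 10)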
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1621.081815] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 62d1f0065a0e45f280f9b6f97e509d21 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1621.095452] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 62d1f0065a0e45f280f9b6f97e509d21 [ 1621.095452] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1621.095452] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 846714bd47104b71abc43e2664d1d44d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1621.106106] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 846714bd47104b71abc43e2664d1d44d [ 1621.106765] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 28a3b287-8717-42d5-989a-4f66642134f7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1621.106977] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1621.107116] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1621.247301] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fd6ce49-13fc-4fae-b02d-cfd0cf2e7299 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.254868] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53f279e1-fe5c-4315-acd3-c38bc03f6669 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.284974] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf9d3bd2-05eb-46e7-b062-b0495871da71 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.291611] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa9f0c6c-9a46-4d26-b959-456717ace5c0 {{(pid=61649) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.304180] env[61649]: DEBUG nova.compute.provider_tree [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1621.304614] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg e7c1012129c94650922b7b00fe10e1d0 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1621.311796] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e7c1012129c94650922b7b00fe10e1d0 [ 1621.312661] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1621.315096] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 94a5e4629ac14f998d8bf62463168fdd in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1621.327017] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 94a5e4629ac14f998d8bf62463168fdd [ 1621.327647] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61649) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1621.327818] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.329s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1624.322821] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1624.323510] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 8c3aee7c43ff4bdbb8864f0f27564359 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1624.341330] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8c3aee7c43ff4bdbb8864f0f27564359 [ 1628.652480] env[61649]: DEBUG oslo_concurrency.lockutils [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Acquiring lock "4e47e82d-780e-4c23-8071-083beab2a53f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1628.652885] env[61649]: DEBUG oslo_concurrency.lockutils [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Lock "4e47e82d-780e-4c23-8071-083beab2a53f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1660.187028] env[61649]: WARNING oslo_vmware.rw_handles [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1660.187028] env[61649]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1660.187028] env[61649]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1660.187028] env[61649]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1660.187028] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1660.187028] env[61649]: ERROR oslo_vmware.rw_handles response.begin() [ 1660.187028] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1660.187028] env[61649]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1660.187028] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1660.187028] env[61649]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1660.187028] env[61649]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1660.187028] env[61649]: ERROR oslo_vmware.rw_handles [ 1660.187899] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Downloaded image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to vmware_temp/6e144e88-66aa-4f56-8e55-24c7d76416c7/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1660.189329] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Caching image {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1660.189604] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Copying Virtual Disk [datastore1] vmware_temp/6e144e88-66aa-4f56-8e55-24c7d76416c7/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk to [datastore1] vmware_temp/6e144e88-66aa-4f56-8e55-24c7d76416c7/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk {{(pid=61649) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1660.189890] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with 
opID=oslo.vmware-92245efc-6be8-4a44-8d05-0fe5b469d422 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.198951] env[61649]: DEBUG oslo_vmware.api [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Waiting for the task: (returnval){ [ 1660.198951] env[61649]: value = "task-158262" [ 1660.198951] env[61649]: _type = "Task" [ 1660.198951] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1660.207066] env[61649]: DEBUG oslo_vmware.api [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Task: {'id': task-158262, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1660.708918] env[61649]: DEBUG oslo_vmware.exceptions [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Fault InvalidArgument not matched. {{(pid=61649) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1660.709154] env[61649]: DEBUG oslo_concurrency.lockutils [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1660.709755] env[61649]: ERROR nova.compute.manager [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1660.709755] env[61649]: Faults: ['InvalidArgument'] [ 1660.709755] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Traceback (most recent call last): [ 1660.709755] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1660.709755] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] yield resources [ 1660.709755] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1660.709755] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] self.driver.spawn(context, instance, image_meta, [ 1660.709755] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1660.709755] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1660.709755] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1660.709755] env[61649]: ERROR nova.compute.manager 
[instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] self._fetch_image_if_missing(context, vi) [ 1660.709755] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1660.710168] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] image_cache(vi, tmp_image_ds_loc) [ 1660.710168] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1660.710168] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] vm_util.copy_virtual_disk( [ 1660.710168] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1660.710168] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] session._wait_for_task(vmdk_copy_task) [ 1660.710168] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1660.710168] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] return self.wait_for_task(task_ref) [ 1660.710168] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1660.710168] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] return evt.wait() [ 1660.710168] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1660.710168] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] result = hub.switch() [ 1660.710168] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1660.710168] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] return self.greenlet.switch() [ 1660.710572] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1660.710572] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] self.f(*self.args, **self.kw) [ 1660.710572] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1660.710572] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] raise exceptions.translate_fault(task_info.error) [ 1660.710572] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1660.710572] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Faults: ['InvalidArgument'] [ 1660.710572] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] [ 1660.710572] env[61649]: INFO 
nova.compute.manager [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Terminating instance [ 1660.711667] env[61649]: DEBUG oslo_concurrency.lockutils [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1660.711873] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1660.712119] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4549da15-bced-4af1-af70-3eea6d9250d4 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.715313] env[61649]: DEBUG oslo_concurrency.lockutils [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Acquiring lock "refresh_cache-0fb0aaae-b6d2-418d-81a9-74671f4b97c6" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1660.715474] env[61649]: DEBUG oslo_concurrency.lockutils [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Acquired lock "refresh_cache-0fb0aaae-b6d2-418d-81a9-74671f4b97c6" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1660.715642] env[61649]: DEBUG nova.network.neutron [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Building network info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1660.716063] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Expecting reply to msg ec85e2018c374bd2beaf1e29be31ef8b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1660.722345] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1660.722519] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Folder [datastore1] devstack-image-cache_base created. 
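The spawn failure above ends in oslo.vmware's task polling, which translates a vCenter task error into a VimFaultException carrying the raw fault names. A runnable sketch of that error-handling shape, with a stub standing in for the real VMwareAPISession:

    from oslo_vmware import exceptions as vexc

    class _StubSession:
        """Placeholder for an oslo.vmware VMwareAPISession."""
        def wait_for_task(self, task_ref):
            # Simulates the failed CopyVirtualDisk_Task from the log.
            raise vexc.VimFaultException(
                ['InvalidArgument'],
                'A specified parameter was not correct: fileType')

    session, copy_task = _StubSession(), 'task-158262'
    try:
        session.wait_for_task(copy_task)
    except vexc.VimFaultException as e:
        # e.fault_list holds the raw fault names, matching the log's
        # "Faults: ['InvalidArgument']".
        print(e.fault_list, str(e))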
{{(pid=61649) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1660.723455] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ec85e2018c374bd2beaf1e29be31ef8b [ 1660.723787] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a08b8a65-6a83-4e1f-b5dc-f57d28022bf8 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.730337] env[61649]: DEBUG oslo_vmware.api [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Waiting for the task: (returnval){ [ 1660.730337] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52043549-ce0f-5e6b-05b7-f5fcf3de17a9" [ 1660.730337] env[61649]: _type = "Task" [ 1660.730337] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1660.737379] env[61649]: DEBUG oslo_vmware.api [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52043549-ce0f-5e6b-05b7-f5fcf3de17a9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1660.743699] env[61649]: DEBUG nova.network.neutron [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Instance cache missing network info. {{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1660.819811] env[61649]: DEBUG nova.network.neutron [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1660.820417] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Expecting reply to msg 75720e45349d4c20bb5132393395cf98 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1660.828871] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 75720e45349d4c20bb5132393395cf98 [ 1660.829507] env[61649]: DEBUG oslo_concurrency.lockutils [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Releasing lock "refresh_cache-0fb0aaae-b6d2-418d-81a9-74671f4b97c6" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1660.829929] env[61649]: DEBUG nova.compute.manager [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Start destroying the instance on the hypervisor. 
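The Acquiring/Acquired/Releasing lines around the cache refresh above come from oslo.concurrency (lockutils.py:310/313/331). The same pattern in miniature, with a lock name modeled on the log's:

    from oslo_concurrency import lockutils

    # Entering the block logs 'Acquiring lock ...' then 'Acquired lock ...';
    # leaving it logs 'Releasing lock ...', as in the entries above.
    with lockutils.lock('refresh_cache-0fb0aaae-b6d2-418d-81a9-74671f4b97c6'):
        pass  # read or rebuild the instance's network info cache here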
{{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1660.830184] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1660.831286] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1729c5eb-d91d-450e-bba1-ae65e9e80044 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.839091] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Unregistering the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1660.839367] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8d9adaaa-fa9a-424a-b305-09507b3e92a5 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.870805] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Unregistered the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1660.871098] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Deleting contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1660.871341] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Deleting the datastore file [datastore1] 0fb0aaae-b6d2-418d-81a9-74671f4b97c6 {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1660.871642] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-63953d0e-0dc5-4920-8b59-70f84714004f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.876913] env[61649]: DEBUG oslo_vmware.api [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Waiting for the task: (returnval){ [ 1660.876913] env[61649]: value = "task-158264" [ 1660.876913] env[61649]: _type = "Task" [ 1660.876913] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1660.884559] env[61649]: DEBUG oslo_vmware.api [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Task: {'id': task-158264, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1661.240289] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Preparing fetch location {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1661.240634] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Creating directory with path [datastore1] vmware_temp/0c412d64-9ca0-46d0-93cc-6bb1dfd91b1d/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1661.240938] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-293ad012-f8c5-4772-83cc-7b1a03412880 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.252463] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Created directory with path [datastore1] vmware_temp/0c412d64-9ca0-46d0-93cc-6bb1dfd91b1d/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1661.252753] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Fetch image to [datastore1] vmware_temp/0c412d64-9ca0-46d0-93cc-6bb1dfd91b1d/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1661.252992] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to [datastore1] vmware_temp/0c412d64-9ca0-46d0-93cc-6bb1dfd91b1d/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1661.253721] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-130e5b0c-10d5-4b78-af21-750ffc04d5b2 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.260051] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3153b98b-3286-436a-9da7-c11707163505 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.268657] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17fa1cd0-75ae-4fdb-b8ce-183973a6e781 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.298984] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-92292006-9d3c-4031-9cb4-a08578d5fbd0 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.304591] env[61649]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-b228e86c-62e7-4b88-ba1a-f7bdb8f806f8 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.324360] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1661.371582] env[61649]: DEBUG oslo_vmware.rw_handles [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0c412d64-9ca0-46d0-93cc-6bb1dfd91b1d/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1661.432653] env[61649]: DEBUG oslo_vmware.rw_handles [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Completed reading data from the image iterator. {{(pid=61649) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1661.432792] env[61649]: DEBUG oslo_vmware.rw_handles [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0c412d64-9ca0-46d0-93cc-6bb1dfd91b1d/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1661.436466] env[61649]: DEBUG oslo_vmware.api [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Task: {'id': task-158264, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.041772} completed successfully. 
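The two rw_handles entries above bracket an image upload: a write handle streams the VMDK to the datastore folder URL, and close() then reads the server's HTTP response; the WARNING back at 1660.187028 came from exactly that close() step when the remote end dropped the connection. A schematic of the flow, with a stub in place of the real oslo.vmware write handle:

    import io

    class _StubWriteHandle:
        """Placeholder for an oslo.vmware datastore write handle."""
        def __init__(self):
            self._buf = io.BytesIO()

        def write(self, chunk):
            self._buf.write(chunk)

        def close(self):
            # The real handle calls getresponse() here; if the server hangs
            # up first, http.client.RemoteDisconnected is raised, as in the
            # WARNING traceback above.
            print('wrote %d bytes' % self._buf.tell())

    image_iter = iter([b'x' * 512] * 4)  # stand-in for Glance image chunks
    handle = _StubWriteHandle()
    for chunk in image_iter:
        handle.write(chunk)
    handle.close()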
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1661.436734] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Deleted the datastore file {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1661.436928] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Deleted contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1661.437100] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1661.437269] env[61649]: INFO nova.compute.manager [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Took 0.61 seconds to destroy the instance on the hypervisor. [ 1661.437496] env[61649]: DEBUG oslo.service.loopingcall [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1661.437695] env[61649]: DEBUG nova.compute.manager [-] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Skipping network deallocation for instance since networking was not requested. 
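The "Waiting for function ... _deallocate_network_with_retries to return" entry above is oslo.service's looping-call helper: the wrapped function runs repeatedly until it raises LoopingCallDone, whose retvalue becomes the wait() result. A minimal sketch with a trivial stand-in for the retried body:

    from oslo_service import loopingcall

    state = {'attempts': 0}

    def _retry_deallocate():
        state['attempts'] += 1
        if state['attempts'] < 3:
            return  # not done yet; runs again after the interval
        raise loopingcall.LoopingCallDone(retvalue='deallocated')

    timer = loopingcall.FixedIntervalLoopingCall(_retry_deallocate)
    result = timer.start(interval=0.01).wait()  # 'deallocated' on 3rd call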
{{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 1661.439870] env[61649]: DEBUG nova.compute.claims [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Aborting claim: {{(pid=61649) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1661.440042] env[61649]: DEBUG oslo_concurrency.lockutils [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1661.440254] env[61649]: DEBUG oslo_concurrency.lockutils [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1661.442075] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Expecting reply to msg 1c266a6e24be48b5bebdd0c3b8a86efe in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1661.472806] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1c266a6e24be48b5bebdd0c3b8a86efe [ 1661.635316] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-196fbe52-abcb-44e2-9a9c-feeec1242345 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.642768] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b08bf2ee-9003-4efe-bfaa-727bb6beb3cf {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.671314] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73863e5a-6cf8-4612-a10c-cdbbf2f5271d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.678333] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ca81984-4c56-4996-b64d-10c341e047dd {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.691078] env[61649]: DEBUG nova.compute.provider_tree [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1661.691525] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Expecting reply to msg 98ea70adbdcc40b0bfc701c8bdec9a55 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1661.698606] env[61649]: INFO oslo_messaging._drivers.amqpdriver 
[-] Received RPC response for msg 98ea70adbdcc40b0bfc701c8bdec9a55 [ 1661.700026] env[61649]: DEBUG nova.scheduler.client.report [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1661.701669] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Expecting reply to msg 1776bfadc0274f03ac9a01fc3873d809 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1661.712551] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1776bfadc0274f03ac9a01fc3873d809 [ 1661.713222] env[61649]: DEBUG oslo_concurrency.lockutils [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.273s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1661.713733] env[61649]: ERROR nova.compute.manager [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1661.713733] env[61649]: Faults: ['InvalidArgument'] [ 1661.713733] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Traceback (most recent call last): [ 1661.713733] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1661.713733] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] self.driver.spawn(context, instance, image_meta, [ 1661.713733] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1661.713733] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1661.713733] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1661.713733] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] self._fetch_image_if_missing(context, vi) [ 1661.713733] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1661.713733] env[61649]: ERROR nova.compute.manager [instance: 
0fb0aaae-b6d2-418d-81a9-74671f4b97c6] image_cache(vi, tmp_image_ds_loc) [ 1661.713733] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1661.714107] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] vm_util.copy_virtual_disk( [ 1661.714107] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1661.714107] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] session._wait_for_task(vmdk_copy_task) [ 1661.714107] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1661.714107] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] return self.wait_for_task(task_ref) [ 1661.714107] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1661.714107] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] return evt.wait() [ 1661.714107] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1661.714107] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] result = hub.switch() [ 1661.714107] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1661.714107] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] return self.greenlet.switch() [ 1661.714107] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1661.714107] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] self.f(*self.args, **self.kw) [ 1661.714497] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1661.714497] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] raise exceptions.translate_fault(task_info.error) [ 1661.714497] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1661.714497] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Faults: ['InvalidArgument'] [ 1661.714497] env[61649]: ERROR nova.compute.manager [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] [ 1661.714497] env[61649]: DEBUG nova.compute.utils [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] VimFaultException {{(pid=61649) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1661.716349] env[61649]: DEBUG 
nova.compute.manager [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Build of instance 0fb0aaae-b6d2-418d-81a9-74671f4b97c6 was re-scheduled: A specified parameter was not correct: fileType [ 1661.716349] env[61649]: Faults: ['InvalidArgument'] {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1661.716724] env[61649]: DEBUG nova.compute.manager [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Unplugging VIFs for instance {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1661.716944] env[61649]: DEBUG oslo_concurrency.lockutils [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Acquiring lock "refresh_cache-0fb0aaae-b6d2-418d-81a9-74671f4b97c6" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1661.717089] env[61649]: DEBUG oslo_concurrency.lockutils [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Acquired lock "refresh_cache-0fb0aaae-b6d2-418d-81a9-74671f4b97c6" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1661.717248] env[61649]: DEBUG nova.network.neutron [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Building network info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1661.717612] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Expecting reply to msg c5e4f45663324895ac26d52db94bcb84 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1661.723174] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c5e4f45663324895ac26d52db94bcb84 [ 1661.742288] env[61649]: DEBUG nova.network.neutron [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Instance cache missing network info. 
{{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1661.788246] env[61649]: DEBUG nova.network.neutron [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1661.788736] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Expecting reply to msg 5a2ba22a1ff64c57ad6595f2bed71d67 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1661.797404] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5a2ba22a1ff64c57ad6595f2bed71d67 [ 1661.797935] env[61649]: DEBUG oslo_concurrency.lockutils [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Releasing lock "refresh_cache-0fb0aaae-b6d2-418d-81a9-74671f4b97c6" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1661.798142] env[61649]: DEBUG nova.compute.manager [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1661.798319] env[61649]: DEBUG nova.compute.manager [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Skipping network deallocation for instance since networking was not requested. 
{{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 1661.799948] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Expecting reply to msg 4ef3d7fe88f74dfeac2f358e088db268 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1661.828786] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4ef3d7fe88f74dfeac2f358e088db268 [ 1661.831329] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Expecting reply to msg a1d53c152e3e4a3d8c90d1f001aff58b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1661.859091] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a1d53c152e3e4a3d8c90d1f001aff58b [ 1661.881187] env[61649]: INFO nova.scheduler.client.report [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Deleted allocations for instance 0fb0aaae-b6d2-418d-81a9-74671f4b97c6 [ 1661.886671] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Expecting reply to msg 04ba3d231b9b46519ba27d9415c4a61a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1661.895204] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 04ba3d231b9b46519ba27d9415c4a61a [ 1661.895659] env[61649]: DEBUG oslo_concurrency.lockutils [None req-2b1a0305-53a2-4b74-b9ea-93b44b5a8bec tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Lock "0fb0aaae-b6d2-418d-81a9-74671f4b97c6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 571.434s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1661.896150] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg 15ededc203944c379da3e5cd289ba1c3 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1661.897198] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8de9762a-e3d2-4851-9bb1-85eeab5a865a tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Lock "0fb0aaae-b6d2-418d-81a9-74671f4b97c6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 375.109s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1661.897421] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8de9762a-e3d2-4851-9bb1-85eeab5a865a tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Acquiring lock "0fb0aaae-b6d2-418d-81a9-74671f4b97c6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1661.897623] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8de9762a-e3d2-4851-9bb1-85eeab5a865a 
tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Lock "0fb0aaae-b6d2-418d-81a9-74671f4b97c6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1661.897791] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8de9762a-e3d2-4851-9bb1-85eeab5a865a tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Lock "0fb0aaae-b6d2-418d-81a9-74671f4b97c6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1661.899543] env[61649]: INFO nova.compute.manager [None req-8de9762a-e3d2-4851-9bb1-85eeab5a865a tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Terminating instance [ 1661.900983] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8de9762a-e3d2-4851-9bb1-85eeab5a865a tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Acquiring lock "refresh_cache-0fb0aaae-b6d2-418d-81a9-74671f4b97c6" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1661.901132] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8de9762a-e3d2-4851-9bb1-85eeab5a865a tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Acquired lock "refresh_cache-0fb0aaae-b6d2-418d-81a9-74671f4b97c6" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1661.901293] env[61649]: DEBUG nova.network.neutron [None req-8de9762a-e3d2-4851-9bb1-85eeab5a865a tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Building network info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1661.901642] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8de9762a-e3d2-4851-9bb1-85eeab5a865a tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Expecting reply to msg 41a4af4452e54f07922f7126b78ca95d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1661.908151] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 41a4af4452e54f07922f7126b78ca95d [ 1661.913488] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 15ededc203944c379da3e5cd289ba1c3 [ 1661.913885] env[61649]: DEBUG nova.compute.manager [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Starting instance... 
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1661.915533] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg 2ac190178bfc4ecfbab2c1cc69b2bbea in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1661.927353] env[61649]: DEBUG nova.network.neutron [None req-8de9762a-e3d2-4851-9bb1-85eeab5a865a tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Instance cache missing network info. {{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1661.948204] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2ac190178bfc4ecfbab2c1cc69b2bbea [ 1661.961894] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1661.962112] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1661.963642] env[61649]: INFO nova.compute.claims [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1661.965990] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg f46d314f8e75421d819a2efab0a6b0be in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1661.977108] env[61649]: DEBUG nova.network.neutron [None req-8de9762a-e3d2-4851-9bb1-85eeab5a865a tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1661.977511] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8de9762a-e3d2-4851-9bb1-85eeab5a865a tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Expecting reply to msg 73a2f2b160dd4abab876838b6b8d0592 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1661.985573] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 73a2f2b160dd4abab876838b6b8d0592 [ 1661.986049] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8de9762a-e3d2-4851-9bb1-85eeab5a865a tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Releasing lock "refresh_cache-0fb0aaae-b6d2-418d-81a9-74671f4b97c6" {{(pid=61649) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1661.986411] env[61649]: DEBUG nova.compute.manager [None req-8de9762a-e3d2-4851-9bb1-85eeab5a865a tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1661.986599] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-8de9762a-e3d2-4851-9bb1-85eeab5a865a tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1661.986894] env[61649]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-730cedb5-9bea-4407-b01e-9635317b277f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.996168] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d6e1594-3889-4342-b33f-30d0f8b9c544 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.006763] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f46d314f8e75421d819a2efab0a6b0be [ 1662.008311] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg 9b31056202ab4ef6abd87e48c1fd7f2b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1662.014471] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9b31056202ab4ef6abd87e48c1fd7f2b [ 1662.026302] env[61649]: WARNING nova.virt.vmwareapi.vmops [None req-8de9762a-e3d2-4851-9bb1-85eeab5a865a tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0fb0aaae-b6d2-418d-81a9-74671f4b97c6 could not be found. [ 1662.026565] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-8de9762a-e3d2-4851-9bb1-85eeab5a865a tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1662.026807] env[61649]: INFO nova.compute.manager [None req-8de9762a-e3d2-4851-9bb1-85eeab5a865a tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1662.027101] env[61649]: DEBUG oslo.service.loopingcall [None req-8de9762a-e3d2-4851-9bb1-85eeab5a865a tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1662.027397] env[61649]: DEBUG nova.compute.manager [-] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1662.027561] env[61649]: DEBUG nova.network.neutron [-] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1662.047129] env[61649]: DEBUG nova.network.neutron [-] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Instance cache missing network info. {{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1662.047660] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg a40e6aae73dd4503af795e60554b9229 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1662.055097] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a40e6aae73dd4503af795e60554b9229 [ 1662.055505] env[61649]: DEBUG nova.network.neutron [-] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1662.056017] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 8bcd59d0d9ea40aa9bcc2bc982e27dc0 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1662.065468] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8bcd59d0d9ea40aa9bcc2bc982e27dc0 [ 1662.065767] env[61649]: INFO nova.compute.manager [-] [instance: 0fb0aaae-b6d2-418d-81a9-74671f4b97c6] Took 0.04 seconds to deallocate network for instance. 
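The "Waiting for function ... _deallocate_network_with_retries to return" record above is oslo.service's looping-call machinery: the deallocation is wrapped in a fixed-interval loop so transient failures can be retried before giving up. A minimal, self-contained sketch of that pattern (the retry_until_done helper and its retry policy are illustrative, not Nova's actual wrapper):

    from oslo_service import loopingcall

    def retry_until_done(func, interval=2.0, max_attempts=3):
        # Sketch of the pattern behind the record above, under assumed
        # names: run func every `interval` seconds until it succeeds or
        # the attempt budget is exhausted.
        attempts = [0]

        def _inner():
            attempts[0] += 1
            try:
                result = func()
            except Exception:
                if attempts[0] >= max_attempts:
                    raise  # loop stops; wait() re-raises the error
                return  # swallow and let the next interval retry
            # Raising LoopingCallDone is the documented way to stop the
            # loop and hand a return value back to wait().
            raise loopingcall.LoopingCallDone(retvalue=result)

        timer = loopingcall.FixedIntervalLoopingCall(_inner)
        return timer.start(interval=interval).wait()

FixedIntervalLoopingCall re-invokes _inner on the fixed interval; the "Waiting for function ... to return" DEBUG line in the log is emitted while the caller blocks in wait().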
[ 1662.069443] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8de9762a-e3d2-4851-9bb1-85eeab5a865a tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Expecting reply to msg 392419a1141746fa90fb57d2f51934b4 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1662.095986] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 392419a1141746fa90fb57d2f51934b4 [ 1662.108474] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8de9762a-e3d2-4851-9bb1-85eeab5a865a tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Expecting reply to msg dd12cb966fa04ee9843ac6785fbc1a77 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1662.142783] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dd12cb966fa04ee9843ac6785fbc1a77 [ 1662.145810] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8de9762a-e3d2-4851-9bb1-85eeab5a865a tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Lock "0fb0aaae-b6d2-418d-81a9-74671f4b97c6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.249s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1662.146306] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8de9762a-e3d2-4851-9bb1-85eeab5a865a tempest-ServersAaction247Test-901698392 tempest-ServersAaction247Test-901698392-project-member] Expecting reply to msg 33e184421d744658aa2d212110c9430e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1662.155560] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 33e184421d744658aa2d212110c9430e [ 1662.159538] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87344615-6a37-4552-9798-09fd0b9e3213 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.167341] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00c3cceb-7c27-4b70-912e-345e237776ad {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.198159] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83b9ab9e-a69f-4f46-bb48-10f83a9ff320 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.205509] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c33c0dff-5444-4cf9-80e8-65f96309ebc7 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.218110] env[61649]: DEBUG nova.compute.provider_tree [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1662.218564] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg 
f8ad9e715a7b41ca85c5804d336173e0 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1662.226949] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f8ad9e715a7b41ca85c5804d336173e0
[ 1662.227805] env[61649]: DEBUG nova.scheduler.client.report [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1662.230031] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg f242b82431b3406b9fa33ac4fde30153 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1662.240147] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f242b82431b3406b9fa33ac4fde30153
[ 1662.240862] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.279s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1662.241313] env[61649]: DEBUG nova.compute.manager [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Start building networks asynchronously for instance. {{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}}
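The inventory record above is what the resource tracker pushes to Placement; the schedulable capacity for each resource class is (total - reserved) * allocation_ratio, while max_unit caps what a single allocation may claim. A quick check of the figures from that record (plain Python, values copied from the log):

    # Inventory as reported in the log record above (trimmed to the
    # fields used in the capacity formula).
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'max_unit': 16, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'max_unit': 197, 'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        # Effective capacity Placement allocates against.
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print('%s: capacity %.0f, per-allocation cap %d' % (rc, capacity, inv['max_unit']))

    # VCPU: capacity 192, per-allocation cap 16   (48 cores oversubscribed 4x)
    # MEMORY_MB: capacity 196078, per-allocation cap 65530
    # DISK_GB: capacity 400, per-allocation cap 197

Against that headroom the instance_claim recorded above succeeds immediately, holding the compute_resources lock for only 0.279s.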
[ 1662.243017] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg 528759aea5ce4c9fb993ad971a43184b in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1662.277211] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 528759aea5ce4c9fb993ad971a43184b
[ 1662.278609] env[61649]: DEBUG nova.compute.utils [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Using /dev/sd instead of None {{(pid=61649) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1662.279181] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg 20ca601afa4543c5976ae3fb873d9c43 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1662.280044] env[61649]: DEBUG nova.compute.manager [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Allocating IP information in the background. {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}}
[ 1662.280228] env[61649]: DEBUG nova.network.neutron [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] allocate_for_instance() {{(pid=61649) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 1662.288062] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 20ca601afa4543c5976ae3fb873d9c43
[ 1662.288567] env[61649]: DEBUG nova.compute.manager [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Start building block device mappings for instance.
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1662.290296] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg 7cdc36ab78db42b497cf2a7549e8ecad in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1662.319746] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7cdc36ab78db42b497cf2a7549e8ecad [ 1662.322571] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg e4a4447664e14e91bab9b371d618bc85 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1662.326706] env[61649]: DEBUG nova.policy [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '73f73ecceb844895bbdf126203901353', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ca010f93a4ee40db98cd5885b47d21d2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61649) authorize /opt/stack/nova/nova/policy.py:203}} [ 1662.352818] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e4a4447664e14e91bab9b371d618bc85 [ 1662.353992] env[61649]: DEBUG nova.compute.manager [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Start spawning the instance on the hypervisor. 
{{(pid=61649) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}}
[ 1662.376334] env[61649]: DEBUG nova.virt.hardware [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-04-02T10:03:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-04-02T10:03:42Z,direct_url=,disk_format='vmdk',id=d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d9c1bd4c77004c3cb8e42232cad1896c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-04-02T10:03:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 1662.376572] env[61649]: DEBUG nova.virt.hardware [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Flavor limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 1662.376731] env[61649]: DEBUG nova.virt.hardware [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Image limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1662.376911] env[61649]: DEBUG nova.virt.hardware [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Flavor pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 1662.377058] env[61649]: DEBUG nova.virt.hardware [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Image pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 1662.377205] env[61649]: DEBUG nova.virt.hardware [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 1662.377424] env[61649]: DEBUG nova.virt.hardware [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 1662.377600] env[61649]: DEBUG nova.virt.hardware [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
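These nova.virt.hardware records enumerate every sockets:cores:threads factorization of the flavor's vCPU count that fits the (here unconstrained) limits; for vcpus=1 the only factorization is 1:1:1, which is exactly what the next records report. A toy re-implementation of just the enumeration step (a simplified sketch, not Nova's actual _get_possible_cpu_topologies, which also handles NUMA cells and preference ordering):

    from collections import namedtuple

    VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # Enumerate sockets*cores*threads factorizations of vcpus that
        # respect the per-dimension limits.
        found = []
        for s in range(1, min(vcpus, max_sockets) + 1):
            for c in range(1, min(vcpus, max_cores) + 1):
                for t in range(1, min(vcpus, max_threads) + 1):
                    if s * c * t == vcpus:
                        found.append(VirtCPUTopology(s, c, t))
        return found

    print(possible_topologies(1))
    # [VirtCPUTopology(sockets=1, cores=1, threads=1)]  -> "Got 1 possible topologies"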
[ 1662.377772] env[61649]: DEBUG nova.virt.hardware [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Got 1 possible topologies {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 1662.377934] env[61649]: DEBUG nova.virt.hardware [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 1662.378100] env[61649]: DEBUG nova.virt.hardware [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 1662.378930] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-349dfb2e-a731-400d-93ea-8b8f5df5499a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1662.386516] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d078448d-7c3f-4db1-b7bb-6168b77e361e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1662.875404] env[61649]: DEBUG nova.network.neutron [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Successfully created port: 2b1d943e-3339-4674-8a09-78efd6a629f6 {{(pid=61649) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 1663.355778] env[61649]: DEBUG nova.compute.manager [req-ce4508bb-60c8-4896-9b6f-cf6b70cd38bd req-8920ae5e-ec3e-4d0e-8a0f-4d579481f1c9 service nova] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Received event network-vif-plugged-2b1d943e-3339-4674-8a09-78efd6a629f6 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}}
[ 1663.356046] env[61649]: DEBUG oslo_concurrency.lockutils [req-ce4508bb-60c8-4896-9b6f-cf6b70cd38bd req-8920ae5e-ec3e-4d0e-8a0f-4d579481f1c9 service nova] Acquiring lock "db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1663.356250] env[61649]: DEBUG oslo_concurrency.lockutils [req-ce4508bb-60c8-4896-9b6f-cf6b70cd38bd req-8920ae5e-ec3e-4d0e-8a0f-4d579481f1c9 service nova] Lock "db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1663.356418] env[61649]: DEBUG oslo_concurrency.lockutils [req-ce4508bb-60c8-4896-9b6f-cf6b70cd38bd req-8920ae5e-ec3e-4d0e-8a0f-4d579481f1c9 service nova] Lock "db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
1663.356585] env[61649]: DEBUG nova.compute.manager [req-ce4508bb-60c8-4896-9b6f-cf6b70cd38bd req-8920ae5e-ec3e-4d0e-8a0f-4d579481f1c9 service nova] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] No waiting events found dispatching network-vif-plugged-2b1d943e-3339-4674-8a09-78efd6a629f6 {{(pid=61649) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1663.356749] env[61649]: WARNING nova.compute.manager [req-ce4508bb-60c8-4896-9b6f-cf6b70cd38bd req-8920ae5e-ec3e-4d0e-8a0f-4d579481f1c9 service nova] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Received unexpected event network-vif-plugged-2b1d943e-3339-4674-8a09-78efd6a629f6 for instance with vm_state building and task_state spawning. [ 1663.491026] env[61649]: DEBUG nova.network.neutron [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Successfully updated port: 2b1d943e-3339-4674-8a09-78efd6a629f6 {{(pid=61649) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1663.491545] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg 43e0d0445fb0471ca5bd1e880668cfab in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1663.499550] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 43e0d0445fb0471ca5bd1e880668cfab [ 1663.500438] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Acquiring lock "refresh_cache-db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1663.500545] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Acquired lock "refresh_cache-db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1663.500648] env[61649]: DEBUG nova.network.neutron [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Building network info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1663.501006] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg a52459f24bf246d7a45b9ae368866c9a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1663.508493] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a52459f24bf246d7a45b9ae368866c9a [ 1663.560048] env[61649]: DEBUG nova.network.neutron [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Instance cache missing network info. 
{{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1663.717515] env[61649]: DEBUG nova.network.neutron [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Updating instance_info_cache with network_info: [{"id": "2b1d943e-3339-4674-8a09-78efd6a629f6", "address": "fa:16:3e:15:77:1e", "network": {"id": "90f0d83b-388c-413d-b2b0-909a46293040", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1619441242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca010f93a4ee40db98cd5885b47d21d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "604056d6-6dd6-47fa-9eaa-6863a3a7c488", "external-id": "nsx-vlan-transportzone-287", "segmentation_id": 287, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2b1d943e-33", "ovs_interfaceid": "2b1d943e-3339-4674-8a09-78efd6a629f6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1663.718034] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg 189c2aeae2764a14990057f6982aca1c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1663.727657] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 189c2aeae2764a14990057f6982aca1c [ 1663.728290] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Releasing lock "refresh_cache-db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1663.728589] env[61649]: DEBUG nova.compute.manager [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Instance network_info: |[{"id": "2b1d943e-3339-4674-8a09-78efd6a629f6", "address": "fa:16:3e:15:77:1e", "network": {"id": "90f0d83b-388c-413d-b2b0-909a46293040", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1619441242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca010f93a4ee40db98cd5885b47d21d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "604056d6-6dd6-47fa-9eaa-6863a3a7c488", "external-id": "nsx-vlan-transportzone-287", "segmentation_id": 287, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2b1d943e-33", "ovs_interfaceid": "2b1d943e-3339-4674-8a09-78efd6a629f6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1663.728942] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:15:77:1e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '604056d6-6dd6-47fa-9eaa-6863a3a7c488', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2b1d943e-3339-4674-8a09-78efd6a629f6', 'vif_model': 'vmxnet3'}] {{(pid=61649) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1663.736435] env[61649]: DEBUG oslo.service.loopingcall [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1663.736907] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Creating VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1663.737136] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9e96a218-ca1f-4f76-9245-b71c7a88c0c1 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.758009] env[61649]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1663.758009] env[61649]: value = "task-158265" [ 1663.758009] env[61649]: _type = "Task" [ 1663.758009] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1663.768214] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158265, 'name': CreateVM_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.266913] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158265, 'name': CreateVM_Task, 'duration_secs': 0.281027} completed successfully. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1664.267080] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Created VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1664.268136] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1664.268308] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1664.268617] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1664.268856] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b4526298-d47f-451f-acb3-aab3d39a8be5 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.273568] env[61649]: DEBUG oslo_vmware.api [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Waiting for the task: (returnval){ [ 1664.273568] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52014d0c-1633-01b0-31bb-acdd3ad76c35" [ 1664.273568] env[61649]: _type = "Task" [ 1664.273568] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1664.281702] env[61649]: DEBUG oslo_vmware.api [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52014d0c-1633-01b0-31bb-acdd3ad76c35, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.783986] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1664.784314] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Processing image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1664.784462] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1665.489053] env[61649]: DEBUG nova.compute.manager [req-a3b55d20-7f88-4bad-9a61-2db7825ed02c req-4eee93b6-8510-448a-ab9b-bc5d29811298 service nova] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Received event network-changed-2b1d943e-3339-4674-8a09-78efd6a629f6 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1665.489261] env[61649]: DEBUG nova.compute.manager [req-a3b55d20-7f88-4bad-9a61-2db7825ed02c req-4eee93b6-8510-448a-ab9b-bc5d29811298 service nova] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Refreshing instance network info cache due to event network-changed-2b1d943e-3339-4674-8a09-78efd6a629f6. 
{{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 1665.489431] env[61649]: DEBUG oslo_concurrency.lockutils [req-a3b55d20-7f88-4bad-9a61-2db7825ed02c req-4eee93b6-8510-448a-ab9b-bc5d29811298 service nova] Acquiring lock "refresh_cache-db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1665.489608] env[61649]: DEBUG oslo_concurrency.lockutils [req-a3b55d20-7f88-4bad-9a61-2db7825ed02c req-4eee93b6-8510-448a-ab9b-bc5d29811298 service nova] Acquired lock "refresh_cache-db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1665.489777] env[61649]: DEBUG nova.network.neutron [req-a3b55d20-7f88-4bad-9a61-2db7825ed02c req-4eee93b6-8510-448a-ab9b-bc5d29811298 service nova] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Refreshing network info cache for port 2b1d943e-3339-4674-8a09-78efd6a629f6 {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 1665.490243] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-a3b55d20-7f88-4bad-9a61-2db7825ed02c req-4eee93b6-8510-448a-ab9b-bc5d29811298 service nova] Expecting reply to msg 8008075504da4d81a5b2e6799dc19ab5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1665.497531] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8008075504da4d81a5b2e6799dc19ab5 [ 1665.968547] env[61649]: DEBUG nova.network.neutron [req-a3b55d20-7f88-4bad-9a61-2db7825ed02c req-4eee93b6-8510-448a-ab9b-bc5d29811298 service nova] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Updated VIF entry in instance network info cache for port 2b1d943e-3339-4674-8a09-78efd6a629f6. 
{{(pid=61649) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 1665.968892] env[61649]: DEBUG nova.network.neutron [req-a3b55d20-7f88-4bad-9a61-2db7825ed02c req-4eee93b6-8510-448a-ab9b-bc5d29811298 service nova] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Updating instance_info_cache with network_info: [{"id": "2b1d943e-3339-4674-8a09-78efd6a629f6", "address": "fa:16:3e:15:77:1e", "network": {"id": "90f0d83b-388c-413d-b2b0-909a46293040", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1619441242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca010f93a4ee40db98cd5885b47d21d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "604056d6-6dd6-47fa-9eaa-6863a3a7c488", "external-id": "nsx-vlan-transportzone-287", "segmentation_id": 287, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2b1d943e-33", "ovs_interfaceid": "2b1d943e-3339-4674-8a09-78efd6a629f6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1665.969409] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-a3b55d20-7f88-4bad-9a61-2db7825ed02c req-4eee93b6-8510-448a-ab9b-bc5d29811298 service nova] Expecting reply to msg 0c9e8b1d800f46bfab457d8c64ce4038 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1665.980180] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0c9e8b1d800f46bfab457d8c64ce4038 [ 1665.980845] env[61649]: DEBUG oslo_concurrency.lockutils [req-a3b55d20-7f88-4bad-9a61-2db7825ed02c req-4eee93b6-8510-448a-ab9b-bc5d29811298 service nova] Releasing lock "refresh_cache-db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1666.750217] env[61649]: DEBUG oslo_concurrency.lockutils [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquiring lock "8b1cd843-48b0-4e85-93fa-32ddd6e32883" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1666.750489] env[61649]: DEBUG oslo_concurrency.lockutils [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Lock "8b1cd843-48b0-4e85-93fa-32ddd6e32883" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1672.928585] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61649) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1674.929354] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1674.929740] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61649) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}}
[ 1676.931155] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1677.924760] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1677.928375] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1680.929612] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1680.929973] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Starting heal instance info cache {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}}
[ 1680.929973] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Rebuilding the list of instances to heal {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}}
[ 1680.930509] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg e913bace8b2643438dd395f71818ac42 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1680.947603] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e913bace8b2643438dd395f71818ac42
[ 1680.949737] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}}
[ 1680.949885] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}}
[ 1680.950016] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}}
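The "Running periodic task ComputeManager.*" records are driven by oslo.service's periodic_task framework: manager methods are decorated with a spacing, and each tick of run_periodic_tasks() dispatches whichever tasks are due. A minimal sketch of that wiring (DemoManager and its task bodies are made up; the real ComputeManager registers the tasks named in the log):

    from oslo_config import cfg
    from oslo_service import periodic_task

    class DemoManager(periodic_task.PeriodicTasks):
        # Toy stand-in for ComputeManager's periodic-task wiring.

        def __init__(self):
            super().__init__(cfg.CONF)

        @periodic_task.periodic_task(spacing=10)
        def _poll_rescued_instances(self, context):
            print('checking rescued instances')

        @periodic_task.periodic_task(spacing=60)
        def _heal_instance_info_cache(self, context):
            print('healing instance network info cache')

    mgr = DemoManager()
    # The service's timer loop calls this repeatedly; tasks whose spacing
    # has elapsed run, producing DEBUG records like the ones above.
    mgr.run_periodic_tasks(context=None)

The heal task above then walks the instance list; instances still in the Building state are skipped, as the next records show.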
{{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1680.950142] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: b6243867-9546-4663-9d48-5c040537490b] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1680.950264] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1680.950395] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1680.950512] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1680.950645] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1680.950763] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1680.950879] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1680.950997] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Didn't find any instances for network info cache update. 
{{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 1680.951431] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1682.929804] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1682.930113] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1682.930392] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg e7b0d81929084c0eac0ae902865fd491 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1682.939022] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e7b0d81929084c0eac0ae902865fd491 [ 1682.940017] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1682.940239] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1682.940403] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1682.940576] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61649) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1682.941639] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-709cbe70-94e4-4c32-842a-18f84e230dde {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.950513] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fef3be3-2db1-4140-8c8a-d66a164d82c4 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.965658] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c627b16d-48c7-4bf3-9412-58ce70e8c006 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.972189] 
env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6aa71d4-f5ed-4407-ad8f-7fa627719e8c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.000076] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181728MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61649) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1683.000232] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1683.000422] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1683.001268] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 24f6a066a1dd43ebaf409664c4c65c87 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1683.033826] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 24f6a066a1dd43ebaf409664c4c65c87 [ 1683.037845] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg fc02dc1966c546ed9ef7abf8779e1909 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1683.046184] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fc02dc1966c546ed9ef7abf8779e1909 [ 1683.063009] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 5f67180f-6b27-4487-8858-5f57fcffd041 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1683.063148] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance aa39503a-2342-421e-928f-35ec7c8e47fb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1683.063268] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance d6e8f17f-40c4-46e0-a900-d92d1da01ed8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1683.063383] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance b6243867-9546-4663-9d48-5c040537490b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1683.063497] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 4b87e74a-2408-466f-b1c2-68330c31fb9d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1683.063610] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance ff225293-ad72-499a-9b5b-147d0bc40350 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1683.063720] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 5f424618-f9b3-4e9a-898c-2d1a07476cc7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1683.063830] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 4661732c-51dc-4a77-aa32-28049dbd5ad7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1683.063938] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 545947a4-3f1a-44fe-ac02-ec5e2e5844d5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1683.064059] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1683.064577] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 5cefbc08717e494f93641d67eae5efdc in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1683.074156] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5cefbc08717e494f93641d67eae5efdc [ 1683.074841] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 28a3b287-8717-42d5-989a-4f66642134f7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1683.075294] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg f9858294d2484e2cbb72ff18f2d0d07e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1683.084039] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5fd1f9d4-83ac-4871-b754-2375a30ada96 tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Expecting reply to msg 1f0c9effb8f748bc894e35609e2028a2 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1683.085226] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f9858294d2484e2cbb72ff18f2d0d07e [ 1683.086182] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 4e47e82d-780e-4c23-8071-083beab2a53f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1683.086696] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 93af48546dd94a1bb658743ce658140c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1683.091253] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1f0c9effb8f748bc894e35609e2028a2 [ 1683.091852] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5fd1f9d4-83ac-4871-b754-2375a30ada96 tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Acquiring lock "545947a4-3f1a-44fe-ac02-ec5e2e5844d5" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1683.094913] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 93af48546dd94a1bb658743ce658140c [ 1683.095719] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 8b1cd843-48b0-4e85-93fa-32ddd6e32883 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}.
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1683.096045] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1683.096288] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1683.245030] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-062f7482-b29e-4f7f-98d2-7204fa363983 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.252076] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d86c870-9efe-49d6-bdd3-c3271b15d00d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.282326] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7295c168-ec02-41c3-84a7-5726eb2a3cfb {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.288996] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a1c7a35-7eaa-4188-985f-6bddde751448 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.301474] env[61649]: DEBUG nova.compute.provider_tree [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1683.301882] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 3e8ad23197e648bcaaa26e0997552f0d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1683.309324] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3e8ad23197e648bcaaa26e0997552f0d [ 1683.310192] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1683.312365] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 91e7b3fd9e314375978b3fb9984c0ab5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1683.325650] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 91e7b3fd9e314375978b3fb9984c0ab5 
[ 1683.326287] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61649) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1683.326464] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.326s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1702.422622] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-116cb751-e7ef-4134-8f6c-545a8708a028 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg a0b37e5b291944e398b43740ca615eaa in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1702.431527] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a0b37e5b291944e398b43740ca615eaa [ 1702.431984] env[61649]: DEBUG oslo_concurrency.lockutils [None req-116cb751-e7ef-4134-8f6c-545a8708a028 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Acquiring lock "db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1711.154373] env[61649]: WARNING oslo_vmware.rw_handles [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1711.154373] env[61649]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1711.154373] env[61649]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1711.154373] env[61649]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1711.154373] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1711.154373] env[61649]: ERROR oslo_vmware.rw_handles response.begin() [ 1711.154373] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1711.154373] env[61649]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1711.154373] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1711.154373] env[61649]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1711.154373] env[61649]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1711.154373] env[61649]: ERROR oslo_vmware.rw_handles [ 1711.155104] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Downloaded image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to vmware_temp/0c412d64-9ca0-46d0-93cc-6bb1dfd91b1d/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1
{{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1711.156848] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Caching image {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1711.157096] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Copying Virtual Disk [datastore1] vmware_temp/0c412d64-9ca0-46d0-93cc-6bb1dfd91b1d/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk to [datastore1] vmware_temp/0c412d64-9ca0-46d0-93cc-6bb1dfd91b1d/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk {{(pid=61649) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1711.157392] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f2aca92c-4067-4a6a-b800-a193e3dba6c4 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.165261] env[61649]: DEBUG oslo_vmware.api [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Waiting for the task: (returnval){ [ 1711.165261] env[61649]: value = "task-158266" [ 1711.165261] env[61649]: _type = "Task" [ 1711.165261] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1711.173095] env[61649]: DEBUG oslo_vmware.api [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Task: {'id': task-158266, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1711.675605] env[61649]: DEBUG oslo_vmware.exceptions [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Fault InvalidArgument not matched. 
{{(pid=61649) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1711.675861] env[61649]: DEBUG oslo_concurrency.lockutils [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1711.676421] env[61649]: ERROR nova.compute.manager [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1711.676421] env[61649]: Faults: ['InvalidArgument'] [ 1711.676421] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Traceback (most recent call last): [ 1711.676421] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1711.676421] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] yield resources [ 1711.676421] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1711.676421] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] self.driver.spawn(context, instance, image_meta, [ 1711.676421] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1711.676421] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1711.676421] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1711.676421] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] self._fetch_image_if_missing(context, vi) [ 1711.676421] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1711.676780] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] image_cache(vi, tmp_image_ds_loc) [ 1711.676780] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1711.676780] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] vm_util.copy_virtual_disk( [ 1711.676780] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1711.676780] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] session._wait_for_task(vmdk_copy_task) [ 1711.676780] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1711.676780] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] return self.wait_for_task(task_ref) [ 1711.676780] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1711.676780] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] return evt.wait() [ 1711.676780] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1711.676780] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] result = hub.switch() [ 1711.676780] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1711.676780] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] return self.greenlet.switch() [ 1711.677112] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1711.677112] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] self.f(*self.args, **self.kw) [ 1711.677112] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1711.677112] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] raise exceptions.translate_fault(task_info.error) [ 1711.677112] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1711.677112] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Faults: ['InvalidArgument'] [ 1711.677112] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] [ 1711.677112] env[61649]: INFO nova.compute.manager [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Terminating instance [ 1711.678288] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1711.678510] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1711.678757] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-4db3d44a-ac90-4c9d-b51f-d0681744b8bd {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.682195] env[61649]: DEBUG nova.compute.manager [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1711.682381] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1711.683085] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9804cd09-9746-4b32-b78b-f9aff5527e2c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.689387] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Unregistering the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1711.689605] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d7eb4823-bdcc-492b-8bb4-489fe0866042 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.691636] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1711.691811] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61649) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1711.692741] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ab0c640-4077-4a46-b6a5-582893606345 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.697950] env[61649]: DEBUG oslo_vmware.api [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Waiting for the task: (returnval){ [ 1711.697950] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52ab6859-5b4d-e43a-986b-6a158aecef3b" [ 1711.697950] env[61649]: _type = "Task" [ 1711.697950] env[61649]: } to complete. 
{{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1711.704828] env[61649]: DEBUG oslo_vmware.api [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52ab6859-5b4d-e43a-986b-6a158aecef3b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1711.755505] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Unregistered the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1711.755724] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Deleting contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1711.755901] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Deleting the datastore file [datastore1] 5f67180f-6b27-4487-8858-5f57fcffd041 {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1711.756186] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bbc423f1-213b-4746-a851-466faa25c6cd {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.762049] env[61649]: DEBUG oslo_vmware.api [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Waiting for the task: (returnval){ [ 1711.762049] env[61649]: value = "task-158268" [ 1711.762049] env[61649]: _type = "Task" [ 1711.762049] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1711.769152] env[61649]: DEBUG oslo_vmware.api [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Task: {'id': task-158268, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1712.207619] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Preparing fetch location {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1712.207956] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Creating directory with path [datastore1] vmware_temp/ce33e390-0ce3-4ae6-bfd0-d96843984c6f/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1712.208104] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-74ec6285-ff7b-445c-8f17-7783b66deada {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.219002] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Created directory with path [datastore1] vmware_temp/ce33e390-0ce3-4ae6-bfd0-d96843984c6f/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1712.219189] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Fetch image to [datastore1] vmware_temp/ce33e390-0ce3-4ae6-bfd0-d96843984c6f/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1712.219357] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to [datastore1] vmware_temp/ce33e390-0ce3-4ae6-bfd0-d96843984c6f/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1712.220101] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-445709f3-5c8d-418e-9af9-f290677bf694 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.226269] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bba43e88-8709-4c94-a7ac-d7c81ae7e576 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.234809] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa20f8b3-4295-4c6b-bf1a-ad25b07a4db9 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.266464] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-dcef8841-7bee-46e8-aa0f-3c4219ce4355 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.272847] env[61649]: DEBUG oslo_vmware.api [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Task: {'id': task-158268, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077692} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1712.274196] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Deleted the datastore file {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1712.274387] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Deleted contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1712.274563] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1712.274743] env[61649]: INFO nova.compute.manager [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Took 0.59 seconds to destroy the instance on the hypervisor. 
[ 1712.276475] env[61649]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-57216f67-d781-4741-a44c-114ae104c777 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.278259] env[61649]: DEBUG nova.compute.claims [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Aborting claim: {{(pid=61649) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1712.278427] env[61649]: DEBUG oslo_concurrency.lockutils [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1712.278638] env[61649]: DEBUG oslo_concurrency.lockutils [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1712.280602] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg c495d745c07d4c1dbefd67bc7e99b967 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1712.300226] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1712.316187] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c495d745c07d4c1dbefd67bc7e99b967 [ 1712.348492] env[61649]: DEBUG oslo_vmware.rw_handles [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ce33e390-0ce3-4ae6-bfd0-d96843984c6f/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1712.412464] env[61649]: DEBUG oslo_vmware.rw_handles [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Completed reading data from the image iterator. 
{{(pid=61649) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1712.412707] env[61649]: DEBUG oslo_vmware.rw_handles [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ce33e390-0ce3-4ae6-bfd0-d96843984c6f/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1712.534725] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f97b37c7-884e-425e-bbda-658b4a1b3fa8 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.542445] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15261cc8-39f0-406d-b490-382ed9620856 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.571524] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-873180dc-8788-46a8-9270-f10fcab6e021 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.578728] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51b5cdf5-3764-4ab0-a677-ff43f1acfde9 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.591823] env[61649]: DEBUG nova.compute.provider_tree [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1712.592219] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg 8d25057efa8b403599c11be44924bb28 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1712.600408] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8d25057efa8b403599c11be44924bb28 [ 1712.601499] env[61649]: DEBUG nova.scheduler.client.report [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1712.603805] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting 
reply to msg 5100a45682a44fa4bcf6b10c27b66547 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1712.614417] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5100a45682a44fa4bcf6b10c27b66547 [ 1712.615098] env[61649]: DEBUG oslo_concurrency.lockutils [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.336s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1712.615678] env[61649]: ERROR nova.compute.manager [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1712.615678] env[61649]: Faults: ['InvalidArgument'] [ 1712.615678] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Traceback (most recent call last): [ 1712.615678] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1712.615678] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] self.driver.spawn(context, instance, image_meta, [ 1712.615678] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1712.615678] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1712.615678] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1712.615678] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] self._fetch_image_if_missing(context, vi) [ 1712.615678] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1712.615678] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] image_cache(vi, tmp_image_ds_loc) [ 1712.615678] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1712.616051] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] vm_util.copy_virtual_disk( [ 1712.616051] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1712.616051] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] session._wait_for_task(vmdk_copy_task) [ 1712.616051] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1712.616051] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] return self.wait_for_task(task_ref) [ 
1712.616051] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1712.616051] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] return evt.wait() [ 1712.616051] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1712.616051] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] result = hub.switch() [ 1712.616051] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1712.616051] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] return self.greenlet.switch() [ 1712.616051] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1712.616051] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] self.f(*self.args, **self.kw) [ 1712.616384] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1712.616384] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] raise exceptions.translate_fault(task_info.error) [ 1712.616384] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1712.616384] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Faults: ['InvalidArgument'] [ 1712.616384] env[61649]: ERROR nova.compute.manager [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] [ 1712.616384] env[61649]: DEBUG nova.compute.utils [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] VimFaultException {{(pid=61649) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1712.617817] env[61649]: DEBUG nova.compute.manager [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Build of instance 5f67180f-6b27-4487-8858-5f57fcffd041 was re-scheduled: A specified parameter was not correct: fileType [ 1712.617817] env[61649]: Faults: ['InvalidArgument'] {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1712.618198] env[61649]: DEBUG nova.compute.manager [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Unplugging VIFs for instance {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1712.618367] env[61649]: DEBUG nova.compute.manager [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 
tempest-AttachInterfacesTestJSON-1871132450-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1712.618540] env[61649]: DEBUG nova.compute.manager [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1712.618704] env[61649]: DEBUG nova.network.neutron [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1713.042037] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg bb2c6e350c7d4c0284a1bb5cb90b188f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1713.052642] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bb2c6e350c7d4c0284a1bb5cb90b188f [ 1713.053219] env[61649]: DEBUG nova.network.neutron [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1713.053726] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg 873c4944ddd047fea356aa6398c67fc2 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1713.062434] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 873c4944ddd047fea356aa6398c67fc2 [ 1713.062994] env[61649]: INFO nova.compute.manager [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Took 0.44 seconds to deallocate network for instance.
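
The traceback above walks through oslo.vmware's task handling: wait_for_task() hands a looping call to _poll_task(), which reads the vCenter TaskInfo until the task finishes and re-raises any server-side fault via exceptions.translate_fault(). Below is a minimal, self-contained sketch of that polling pattern; it is not oslo.vmware itself, and get_task_info, poll_interval, and TaskFaultError are illustrative stand-ins for the session plumbing and VimFaultException the real library provides.

# Minimal sketch of the polling pattern in the traceback above
# (oslo_vmware/api.py: wait_for_task -> _poll_task). Not oslo.vmware itself.
import time


class TaskFaultError(Exception):
    """Stand-in for oslo_vmware.exceptions.VimFaultException."""

    def __init__(self, message, fault_list):
        super().__init__(message)
        self.fault_list = fault_list  # e.g. ['InvalidArgument']


def wait_for_task(get_task_info, poll_interval=0.5):
    """Block until a vCenter task finishes; re-raise server-side faults."""
    while True:
        info = get_task_info()  # one PropertyCollector round-trip per poll
        if info['state'] == 'success':
            return info.get('result')
        if info['state'] == 'error':
            # The real _poll_task calls exceptions.translate_fault(
            # task_info.error), which produced "A specified parameter was
            # not correct: fileType / Faults: ['InvalidArgument']" above.
            raise TaskFaultError(info['error']['message'],
                                 info['error']['fault_list'])
        time.sleep(poll_interval)  # 'queued' / 'running': poll again

In the production code the sleep is driven by an eventlet looping call, which is why the traceback passes through evt.wait() and hub.switch(): the calling greenthread parks while the poller runs.
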
[ 1713.064755] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg 7b22ac501ee64700bea5313d722d2198 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1713.096313] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7b22ac501ee64700bea5313d722d2198 [ 1713.099138] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg 8fddb614524948b88d47d56e7891c5fb in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1713.128805] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8fddb614524948b88d47d56e7891c5fb [ 1713.150132] env[61649]: INFO nova.scheduler.client.report [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Deleted allocations for instance 5f67180f-6b27-4487-8858-5f57fcffd041 [ 1713.155671] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg 33fa595192ad4903b106e8151612d9f2 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1713.169076] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 33fa595192ad4903b106e8151612d9f2 [ 1713.169764] env[61649]: DEBUG oslo_concurrency.lockutils [None req-ec8232ef-c5ff-48df-b9eb-dc6f7e368d27 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Lock "5f67180f-6b27-4487-8858-5f57fcffd041" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 600.072s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1713.170425] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg 6547a42f3a1c4dae97ec7fbcb375ab58 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1713.171245] env[61649]: DEBUG oslo_concurrency.lockutils [None req-108d1778-0860-4d10-88a1-78c97b44c494 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Lock "5f67180f-6b27-4487-8858-5f57fcffd041" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 404.421s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1713.171606] env[61649]: DEBUG oslo_concurrency.lockutils [None req-108d1778-0860-4d10-88a1-78c97b44c494 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Acquiring lock "5f67180f-6b27-4487-8858-5f57fcffd041-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1713.171973] env[61649]: DEBUG oslo_concurrency.lockutils [None req-108d1778-0860-4d10-88a1-78c97b44c494 tempest-AttachInterfacesTestJSON-1871132450 
tempest-AttachInterfacesTestJSON-1871132450-project-member] Lock "5f67180f-6b27-4487-8858-5f57fcffd041-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1713.172331] env[61649]: DEBUG oslo_concurrency.lockutils [None req-108d1778-0860-4d10-88a1-78c97b44c494 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Lock "5f67180f-6b27-4487-8858-5f57fcffd041-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1713.174358] env[61649]: INFO nova.compute.manager [None req-108d1778-0860-4d10-88a1-78c97b44c494 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Terminating instance [ 1713.176151] env[61649]: DEBUG nova.compute.manager [None req-108d1778-0860-4d10-88a1-78c97b44c494 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1713.176472] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-108d1778-0860-4d10-88a1-78c97b44c494 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1713.177031] env[61649]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-21572b09-5070-4057-a356-5cebf46c9709 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.189239] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe229106-d2f0-4def-ba62-f11b73d1c606 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.201302] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6547a42f3a1c4dae97ec7fbcb375ab58 [ 1713.202026] env[61649]: DEBUG nova.compute.manager [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1713.204307] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg ab57af77596b467e9f30bc6459fe7490 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1713.222345] env[61649]: WARNING nova.virt.vmwareapi.vmops [None req-108d1778-0860-4d10-88a1-78c97b44c494 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5f67180f-6b27-4487-8858-5f57fcffd041 could not be found. 
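
The WARNING above (InstanceNotFound during destroy) reflects an idempotent-destroy pattern: a VM that is already gone on the backend is treated as successfully destroyed so that the rest of terminate can proceed. The sketch below illustrates that shape under stated assumptions; lookup_vm, destroy_vm, and warn are hypothetical helpers, not Nova's actual vmops code.

# Minimal sketch (assumed helpers) of tolerating an already-missing VM.
class InstanceNotFound(Exception):
    pass


def destroy(instance_uuid, lookup_vm, destroy_vm, warn):
    """Destroy the backend VM, tolerating one that no longer exists."""
    try:
        vm_ref = lookup_vm(instance_uuid)   # cf. SearchIndex.FindAllByUuid
        destroy_vm(vm_ref)                  # power off + unregister/delete
    except InstanceNotFound as exc:
        # Warn and fall through: the caller still deallocates ports,
        # releases the resource claim, and deletes placement allocations.
        warn('Instance does not exist on backend: %s' % exc)

That fall-through is why the log continues straight to "Instance destroyed" and network deallocation even though the backend lookup found nothing.
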
[ 1713.222925] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-108d1778-0860-4d10-88a1-78c97b44c494 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1713.223215] env[61649]: INFO nova.compute.manager [None req-108d1778-0860-4d10-88a1-78c97b44c494 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1713.223569] env[61649]: DEBUG oslo.service.loopingcall [None req-108d1778-0860-4d10-88a1-78c97b44c494 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1713.223921] env[61649]: DEBUG nova.compute.manager [-] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1713.224132] env[61649]: DEBUG nova.network.neutron [-] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1713.235356] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ab57af77596b467e9f30bc6459fe7490 [ 1713.241017] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 0a82b8dda9dd4db08b5e93e0124d4077 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1713.249461] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0a82b8dda9dd4db08b5e93e0124d4077 [ 1713.249797] env[61649]: DEBUG nova.network.neutron [-] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1713.250136] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 0b08fc5a7dcb48ec9f7a7e797873b060 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1713.251416] env[61649]: DEBUG oslo_concurrency.lockutils [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1713.251637] env[61649]: DEBUG oslo_concurrency.lockutils [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1713.253072] env[61649]: INFO nova.compute.claims [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Claim successful on node 
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1713.254649] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg 3ef63b1b732643d4a841934133960c05 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1713.256602] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0b08fc5a7dcb48ec9f7a7e797873b060 [ 1713.256983] env[61649]: INFO nova.compute.manager [-] [instance: 5f67180f-6b27-4487-8858-5f57fcffd041] Took 0.03 seconds to deallocate network for instance. [ 1713.260057] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-108d1778-0860-4d10-88a1-78c97b44c494 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg 6d4f9f2391974ef588a52a984724c907 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1713.284563] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3ef63b1b732643d4a841934133960c05 [ 1713.286199] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg 25d6c30567714171a6459a3100032321 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1713.292810] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 25d6c30567714171a6459a3100032321 [ 1713.328941] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6d4f9f2391974ef588a52a984724c907 [ 1713.341308] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-108d1778-0860-4d10-88a1-78c97b44c494 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg bab9a1daa2e145239f1b8ab9f36c453e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1713.375257] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bab9a1daa2e145239f1b8ab9f36c453e [ 1713.377962] env[61649]: DEBUG oslo_concurrency.lockutils [None req-108d1778-0860-4d10-88a1-78c97b44c494 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Lock "5f67180f-6b27-4487-8858-5f57fcffd041" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.207s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1713.378287] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-108d1778-0860-4d10-88a1-78c97b44c494 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg 04bf1b0259a24e49934f296884b37e69 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1713.388625] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 04bf1b0259a24e49934f296884b37e69 [ 1713.445726] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ece8da33-5f69-4143-93e7-d73902b6b278 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.453565] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23410c88-21bc-4f22-9acd-1faee3102fd4 {{(pid=61649) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.484392] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f3a3453-ac65-46be-9cc0-97000df43017 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.490894] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-359ca787-0ab4-4689-a4ea-c015f5d67feb {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.503179] env[61649]: DEBUG nova.compute.provider_tree [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1713.503648] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg 11a600272698443db169e27a528a8b19 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1713.510702] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 11a600272698443db169e27a528a8b19 [ 1713.511564] env[61649]: DEBUG nova.scheduler.client.report [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1713.513734] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg 3a0646ea8610467bb0ea25618a9cbc78 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1713.526538] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3a0646ea8610467bb0ea25618a9cbc78 [ 1713.527222] env[61649]: DEBUG oslo_concurrency.lockutils [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.276s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1713.527693] env[61649]: DEBUG nova.compute.manager [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Start building networks asynchronously for instance. 
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1713.529393] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg eac5087e594849e1b816979f20983ecc in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1713.557941] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eac5087e594849e1b816979f20983ecc [ 1713.559570] env[61649]: DEBUG nova.compute.utils [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Using /dev/sd instead of None {{(pid=61649) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1713.560172] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg 091ee8733e42479a89703e69a2f95ec4 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1713.561561] env[61649]: DEBUG nova.compute.manager [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Allocating IP information in the background. {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1713.561724] env[61649]: DEBUG nova.network.neutron [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] allocate_for_instance() {{(pid=61649) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1713.571553] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 091ee8733e42479a89703e69a2f95ec4 [ 1713.572149] env[61649]: DEBUG nova.compute.manager [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Start building block device mappings for instance. 
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1713.573814] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg ad89fc7be4ff4c34be7ba2a2194ed935 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1713.601766] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ad89fc7be4ff4c34be7ba2a2194ed935 [ 1713.604603] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg 089e2ef9ed7d4f839a60af393f33b3f1 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1713.606678] env[61649]: DEBUG nova.policy [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e0551dadfa2643d18f591f7c00dab53e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2e0bd6f2d26e442f92498e358016a346', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61649) authorize /opt/stack/nova/nova/policy.py:203}} [ 1713.634614] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 089e2ef9ed7d4f839a60af393f33b3f1 [ 1713.635726] env[61649]: DEBUG nova.compute.manager [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Start spawning the instance on the hypervisor. 
{{(pid=61649) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1713.656930] env[61649]: DEBUG nova.virt.hardware [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-04-02T10:03:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-04-02T10:03:42Z,direct_url=,disk_format='vmdk',id=d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d9c1bd4c77004c3cb8e42232cad1896c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-04-02T10:03:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1713.657145] env[61649]: DEBUG nova.virt.hardware [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Flavor limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1713.657292] env[61649]: DEBUG nova.virt.hardware [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Image limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1713.657555] env[61649]: DEBUG nova.virt.hardware [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Flavor pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1713.657736] env[61649]: DEBUG nova.virt.hardware [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Image pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1713.657886] env[61649]: DEBUG nova.virt.hardware [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1713.658093] env[61649]: DEBUG nova.virt.hardware [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1713.658249] env[61649]: DEBUG nova.virt.hardware [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1713.658413] env[61649]: DEBUG 
nova.virt.hardware [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Got 1 possible topologies {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1713.658574] env[61649]: DEBUG nova.virt.hardware [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1713.658748] env[61649]: DEBUG nova.virt.hardware [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1713.659641] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b89bcd97-49ae-4a90-ac02-7e2c37c56fd5 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.668415] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e151bc7b-ff44-4982-b301-19332d3bed25 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.947965] env[61649]: DEBUG nova.network.neutron [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Successfully created port: 17589ab7-182b-4cad-9b87-f10d76761cd4 {{(pid=61649) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1714.451770] env[61649]: DEBUG nova.network.neutron [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Successfully updated port: 17589ab7-182b-4cad-9b87-f10d76761cd4 {{(pid=61649) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1714.452258] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg 7402a89679f045138815a8b845765eb1 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1714.460564] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7402a89679f045138815a8b845765eb1 [ 1714.461436] env[61649]: DEBUG oslo_concurrency.lockutils [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Acquiring lock "refresh_cache-28a3b287-8717-42d5-989a-4f66642134f7" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1714.461436] env[61649]: DEBUG oslo_concurrency.lockutils [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Acquired lock "refresh_cache-28a3b287-8717-42d5-989a-4f66642134f7" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 
1714.461583] env[61649]: DEBUG nova.network.neutron [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Building network info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1714.461980] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg c302d5cf3f2c42f8a8ceaccbee398ec3 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1714.469239] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c302d5cf3f2c42f8a8ceaccbee398ec3 [ 1714.507814] env[61649]: DEBUG nova.network.neutron [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Instance cache missing network info. {{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1714.651140] env[61649]: DEBUG nova.network.neutron [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Updating instance_info_cache with network_info: [{"id": "17589ab7-182b-4cad-9b87-f10d76761cd4", "address": "fa:16:3e:b7:c3:e9", "network": {"id": "a42f700e-7bbf-46d9-a33a-c4d37e928c52", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1858676203-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2e0bd6f2d26e442f92498e358016a346", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap17589ab7-18", "ovs_interfaceid": "17589ab7-182b-4cad-9b87-f10d76761cd4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1714.651634] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg 0786668e378e44c68a4e954680a17000 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1714.664954] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0786668e378e44c68a4e954680a17000 [ 1714.665716] env[61649]: DEBUG oslo_concurrency.lockutils [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Releasing lock "refresh_cache-28a3b287-8717-42d5-989a-4f66642134f7" {{(pid=61649) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1714.665880] env[61649]: DEBUG nova.compute.manager [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Instance network_info: |[{"id": "17589ab7-182b-4cad-9b87-f10d76761cd4", "address": "fa:16:3e:b7:c3:e9", "network": {"id": "a42f700e-7bbf-46d9-a33a-c4d37e928c52", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1858676203-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2e0bd6f2d26e442f92498e358016a346", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap17589ab7-18", "ovs_interfaceid": "17589ab7-182b-4cad-9b87-f10d76761cd4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1714.666205] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b7:c3:e9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ba866c99-1cb2-4588-9f76-4bc0421ed46a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '17589ab7-182b-4cad-9b87-f10d76761cd4', 'vif_model': 'vmxnet3'}] {{(pid=61649) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1714.674271] env[61649]: DEBUG oslo.service.loopingcall [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1714.674785] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Creating VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1714.675018] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7c142d20-448f-4f84-9dc2-8ada25541f32 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.696981] env[61649]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1714.696981] env[61649]: value = "task-158269" [ 1714.696981] env[61649]: _type = "Task" [ 1714.696981] env[61649]: } to complete. 
{{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1714.704572] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158269, 'name': CreateVM_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1715.092547] env[61649]: DEBUG nova.compute.manager [req-bb826318-bd82-45ff-8fbc-35da2e6a1db3 req-75f0e92c-b5ee-4d4d-9781-167daeb6e5d0 service nova] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Received event network-vif-plugged-17589ab7-182b-4cad-9b87-f10d76761cd4 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1715.092950] env[61649]: DEBUG oslo_concurrency.lockutils [req-bb826318-bd82-45ff-8fbc-35da2e6a1db3 req-75f0e92c-b5ee-4d4d-9781-167daeb6e5d0 service nova] Acquiring lock "28a3b287-8717-42d5-989a-4f66642134f7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1715.093317] env[61649]: DEBUG oslo_concurrency.lockutils [req-bb826318-bd82-45ff-8fbc-35da2e6a1db3 req-75f0e92c-b5ee-4d4d-9781-167daeb6e5d0 service nova] Lock "28a3b287-8717-42d5-989a-4f66642134f7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1715.093641] env[61649]: DEBUG oslo_concurrency.lockutils [req-bb826318-bd82-45ff-8fbc-35da2e6a1db3 req-75f0e92c-b5ee-4d4d-9781-167daeb6e5d0 service nova] Lock "28a3b287-8717-42d5-989a-4f66642134f7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1715.093951] env[61649]: DEBUG nova.compute.manager [req-bb826318-bd82-45ff-8fbc-35da2e6a1db3 req-75f0e92c-b5ee-4d4d-9781-167daeb6e5d0 service nova] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] No waiting events found dispatching network-vif-plugged-17589ab7-182b-4cad-9b87-f10d76761cd4 {{(pid=61649) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1715.094245] env[61649]: WARNING nova.compute.manager [req-bb826318-bd82-45ff-8fbc-35da2e6a1db3 req-75f0e92c-b5ee-4d4d-9781-167daeb6e5d0 service nova] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Received unexpected event network-vif-plugged-17589ab7-182b-4cad-9b87-f10d76761cd4 for instance with vm_state building and task_state spawning. [ 1715.094536] env[61649]: DEBUG nova.compute.manager [req-bb826318-bd82-45ff-8fbc-35da2e6a1db3 req-75f0e92c-b5ee-4d4d-9781-167daeb6e5d0 service nova] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Received event network-changed-17589ab7-182b-4cad-9b87-f10d76761cd4 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1715.094833] env[61649]: DEBUG nova.compute.manager [req-bb826318-bd82-45ff-8fbc-35da2e6a1db3 req-75f0e92c-b5ee-4d4d-9781-167daeb6e5d0 service nova] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Refreshing instance network info cache due to event network-changed-17589ab7-182b-4cad-9b87-f10d76761cd4. 
{{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 1715.095160] env[61649]: DEBUG oslo_concurrency.lockutils [req-bb826318-bd82-45ff-8fbc-35da2e6a1db3 req-75f0e92c-b5ee-4d4d-9781-167daeb6e5d0 service nova] Acquiring lock "refresh_cache-28a3b287-8717-42d5-989a-4f66642134f7" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1715.095425] env[61649]: DEBUG oslo_concurrency.lockutils [req-bb826318-bd82-45ff-8fbc-35da2e6a1db3 req-75f0e92c-b5ee-4d4d-9781-167daeb6e5d0 service nova] Acquired lock "refresh_cache-28a3b287-8717-42d5-989a-4f66642134f7" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1715.095734] env[61649]: DEBUG nova.network.neutron [req-bb826318-bd82-45ff-8fbc-35da2e6a1db3 req-75f0e92c-b5ee-4d4d-9781-167daeb6e5d0 service nova] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Refreshing network info cache for port 17589ab7-182b-4cad-9b87-f10d76761cd4 {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 1715.096331] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-bb826318-bd82-45ff-8fbc-35da2e6a1db3 req-75f0e92c-b5ee-4d4d-9781-167daeb6e5d0 service nova] Expecting reply to msg e5ba8ca9e5a6482bb0b7f52e1f8e0856 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1715.104081] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e5ba8ca9e5a6482bb0b7f52e1f8e0856 [ 1715.206267] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158269, 'name': CreateVM_Task, 'duration_secs': 0.277917} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1715.206664] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Created VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1715.207452] env[61649]: DEBUG oslo_concurrency.lockutils [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1715.207793] env[61649]: DEBUG oslo_concurrency.lockutils [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1715.208246] env[61649]: DEBUG oslo_concurrency.lockutils [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1715.208615] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aed4050e-d962-44e1-a747-9a518ac32a0c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.213337] env[61649]: DEBUG oslo_vmware.api [None 
req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Waiting for the task: (returnval){ [ 1715.213337] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52178fd7-8643-e93a-eb62-03b2a9f117b3" [ 1715.213337] env[61649]: _type = "Task" [ 1715.213337] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1715.229411] env[61649]: DEBUG oslo_concurrency.lockutils [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1715.229863] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Processing image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1715.230261] env[61649]: DEBUG oslo_concurrency.lockutils [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1715.380620] env[61649]: DEBUG nova.network.neutron [req-bb826318-bd82-45ff-8fbc-35da2e6a1db3 req-75f0e92c-b5ee-4d4d-9781-167daeb6e5d0 service nova] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Updated VIF entry in instance network info cache for port 17589ab7-182b-4cad-9b87-f10d76761cd4. 
{{(pid=61649) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 1715.380943] env[61649]: DEBUG nova.network.neutron [req-bb826318-bd82-45ff-8fbc-35da2e6a1db3 req-75f0e92c-b5ee-4d4d-9781-167daeb6e5d0 service nova] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Updating instance_info_cache with network_info: [{"id": "17589ab7-182b-4cad-9b87-f10d76761cd4", "address": "fa:16:3e:b7:c3:e9", "network": {"id": "a42f700e-7bbf-46d9-a33a-c4d37e928c52", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1858676203-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2e0bd6f2d26e442f92498e358016a346", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap17589ab7-18", "ovs_interfaceid": "17589ab7-182b-4cad-9b87-f10d76761cd4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1715.381450] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-bb826318-bd82-45ff-8fbc-35da2e6a1db3 req-75f0e92c-b5ee-4d4d-9781-167daeb6e5d0 service nova] Expecting reply to msg 6d61bf00be88488aa680fdaff6c26108 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1715.389535] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6d61bf00be88488aa680fdaff6c26108 [ 1715.390084] env[61649]: DEBUG oslo_concurrency.lockutils [req-bb826318-bd82-45ff-8fbc-35da2e6a1db3 req-75f0e92c-b5ee-4d4d-9781-167daeb6e5d0 service nova] Releasing lock "refresh_cache-28a3b287-8717-42d5-989a-4f66642134f7" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1730.984810] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 536f72d183084713847326ee7b7877d0 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1730.993982] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 536f72d183084713847326ee7b7877d0 [ 1735.326030] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1735.326353] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1735.326466] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61649) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 1737.930516] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1737.978310] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._sync_power_states {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1737.978913] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg e033ee58bddb4f4c94d8ebc0d6d1f3b2 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1737.996521] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e033ee58bddb4f4c94d8ebc0d6d1f3b2 [ 1737.998792] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Getting list of instances from cluster (obj){ [ 1737.998792] env[61649]: value = "domain-c8" [ 1737.998792] env[61649]: _type = "ClusterComputeResource" [ 1737.998792] env[61649]: } {{(pid=61649) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1738.000350] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83f537e0-df3e-4cd4-aae5-74f4cf217b81 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.017277] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Got total of 10 instances {{(pid=61649) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1738.017461] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Triggering sync for uuid aa39503a-2342-421e-928f-35ec7c8e47fb {{(pid=61649) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 1738.017662] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Triggering sync for uuid d6e8f17f-40c4-46e0-a900-d92d1da01ed8 {{(pid=61649) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 1738.017825] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Triggering sync for uuid b6243867-9546-4663-9d48-5c040537490b {{(pid=61649) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 1738.017978] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Triggering sync for uuid 4b87e74a-2408-466f-b1c2-68330c31fb9d {{(pid=61649) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 1738.018128] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Triggering sync for uuid ff225293-ad72-499a-9b5b-147d0bc40350 {{(pid=61649) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 1738.018275] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Triggering sync for uuid 5f424618-f9b3-4e9a-898c-2d1a07476cc7 {{(pid=61649) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 1738.018425] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None 
None] Triggering sync for uuid 4661732c-51dc-4a77-aa32-28049dbd5ad7 {{(pid=61649) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 1738.018569] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Triggering sync for uuid 545947a4-3f1a-44fe-ac02-ec5e2e5844d5 {{(pid=61649) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 1738.018717] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Triggering sync for uuid db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f {{(pid=61649) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 1738.018864] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Triggering sync for uuid 28a3b287-8717-42d5-989a-4f66642134f7 {{(pid=61649) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 1738.019185] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "aa39503a-2342-421e-928f-35ec7c8e47fb" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1738.019412] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "d6e8f17f-40c4-46e0-a900-d92d1da01ed8" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1738.019654] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "b6243867-9546-4663-9d48-5c040537490b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1738.019857] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "4b87e74a-2408-466f-b1c2-68330c31fb9d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1738.020068] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "ff225293-ad72-499a-9b5b-147d0bc40350" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1738.020270] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "5f424618-f9b3-4e9a-898c-2d1a07476cc7" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1738.020462] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "4661732c-51dc-4a77-aa32-28049dbd5ad7" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61649) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1738.020694] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "545947a4-3f1a-44fe-ac02-ec5e2e5844d5" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1738.020896] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1738.021083] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "28a3b287-8717-42d5-989a-4f66642134f7" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1738.972069] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1739.924280] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1740.928605] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1741.930464] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1741.930822] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Starting heal instance info cache {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 1741.930822] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Rebuilding the list of instances to heal {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 1741.931335] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 933f823eff48496fbe723f9c6ed362b5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1741.948435] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 933f823eff48496fbe723f9c6ed362b5 [ 1741.950560] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Skipping network cache update for instance because it is Building. 
{{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1741.950726] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1741.950863] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: b6243867-9546-4663-9d48-5c040537490b] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1741.950988] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1741.951111] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1741.951229] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1741.951347] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1741.951462] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1741.951579] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1741.951697] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1741.951814] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Didn't find any instances for network info cache update. 
{{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 1741.952322] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1741.952459] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Cleaning up deleted instances with incomplete migration {{(pid=61649) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11255}} [ 1741.952725] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 123f7e6866174a48b53cb546d2debc5a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1741.958927] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 123f7e6866174a48b53cb546d2debc5a [ 1742.929832] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1742.930057] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 679121d822104731a5e9fc8b09a3bf8e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1742.936285] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 679121d822104731a5e9fc8b09a3bf8e [ 1744.936577] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1744.936888] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1744.937160] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 99e72131070c4c10a186cde444ad3859 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1744.947026] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 99e72131070c4c10a186cde444ad3859 [ 1744.947992] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.948209] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1744.948369] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1744.948516] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61649) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1744.949652] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd2ca1f1-0a65-4fc1-9d21-226c22191369 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.958237] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba5cb38c-ea97-4896-9de9-cbe6b7f73109 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.972021] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de9adb35-14d0-4ed8-96ca-11f927ad359b {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.977904] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f46ee2dd-d6ec-4607-a3db-cb263efe9334 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.006622] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181773MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61649) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1745.006768] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1745.006957] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1745.007736] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 92814d7d663f4993b6bd24ff87d082a4 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1745.039654] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 92814d7d663f4993b6bd24ff87d082a4 [ 1745.043757] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg c35b2ad7804f4e18889223f364bba776 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1745.051930] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c35b2ad7804f4e18889223f364bba776 [ 1745.068511] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 
aa39503a-2342-421e-928f-35ec7c8e47fb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1745.068664] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance d6e8f17f-40c4-46e0-a900-d92d1da01ed8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1745.068850] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance b6243867-9546-4663-9d48-5c040537490b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1745.068996] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 4b87e74a-2408-466f-b1c2-68330c31fb9d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1745.069118] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance ff225293-ad72-499a-9b5b-147d0bc40350 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1745.069237] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 5f424618-f9b3-4e9a-898c-2d1a07476cc7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1745.069352] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 4661732c-51dc-4a77-aa32-28049dbd5ad7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1745.069463] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 545947a4-3f1a-44fe-ac02-ec5e2e5844d5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1745.069605] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1745.069726] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 28a3b287-8717-42d5-989a-4f66642134f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1745.070209] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 11082a3d7ff0456d9fe94ad3921e821d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1745.079106] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 11082a3d7ff0456d9fe94ad3921e821d [ 1745.079739] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 4e47e82d-780e-4c23-8071-083beab2a53f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1745.080204] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 431195913b1a46bfbe97af302a885bf1 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1745.088654] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 431195913b1a46bfbe97af302a885bf1 [ 1745.089241] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 8b1cd843-48b0-4e85-93fa-32ddd6e32883 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1745.089436] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1745.089601] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1745.214867] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b39fad4-f1b1-4e4e-a812-76c321a7d00f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.222226] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61bfd957-fe31-4362-8042-ec1547fa829b {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.251592] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d21f1b61-89b6-4f88-9c88-ea1adac61d5b {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.257989] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d06203c1-93e3-492d-8df0-0db81b84c252 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.270337] env[61649]: DEBUG nova.compute.provider_tree [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1745.270789] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 83013312d29145c98bc8fc5734d1bf91 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1745.277584] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 83013312d29145c98bc8fc5734d1bf91 [ 1745.278399] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1745.280583] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg dcfb7153d46149d88d041a752366ffd5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1745.291958] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dcfb7153d46149d88d041a752366ffd5 
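The "Final resource view" record above is plain arithmetic over the ten per-instance placement allocations listed before it, plus the 512 MB that the MEMORY_MB inventory reserves. A minimal sketch (standalone Python for illustration, not nova's ResourceTracker code) that reproduces the reported numbers:

    # Ten instances, each holding {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1} in placement.
    allocations = [{'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}] * 10
    reserved_mb = 512  # 'reserved' in the MEMORY_MB inventory reported above

    used_ram = reserved_mb + sum(a['MEMORY_MB'] for a in allocations)  # 512 + 10*128 = 1792 MB
    used_disk = sum(a['DISK_GB'] for a in allocations)                 # 10 * 1 = 10 GB
    used_vcpus = sum(a['VCPU'] for a in allocations)                   # 10 * 1 = 10

    # Matches the record above: used_ram=1792MB used_disk=10GB used_vcpus=10
    assert (used_ram, used_disk, used_vcpus) == (1792, 10, 10)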
[ 1745.292571] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61649) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1745.292747] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.286s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1749.281834] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1749.281834] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 757acb9ec32f4b9b99131fd0c2421043 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1749.297875] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 757acb9ec32f4b9b99131fd0c2421043 [ 1752.930152] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1752.930543] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Cleaning up deleted instances {{(pid=61649) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11217}} [ 1752.930972] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg c1a93946c5fb4a3b9ede24c1657d189c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1752.940121] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c1a93946c5fb4a3b9ede24c1657d189c [ 1752.940644] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] There are 0 instances to clean {{(pid=61649) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11226}} [ 1756.035156] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquiring lock "d8503feb-d1df-4e1f-8357-e080e8bdb174" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1756.035501] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "d8503feb-d1df-4e1f-8357-e080e8bdb174" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1760.272787] env[61649]: WARNING oslo_vmware.rw_handles [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 
tempest-AttachVolumeNegativeTest-227144255-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1760.272787] env[61649]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1760.272787] env[61649]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1760.272787] env[61649]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1760.272787] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1760.272787] env[61649]: ERROR oslo_vmware.rw_handles response.begin() [ 1760.272787] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1760.272787] env[61649]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1760.272787] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1760.272787] env[61649]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1760.272787] env[61649]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1760.272787] env[61649]: ERROR oslo_vmware.rw_handles [ 1760.273496] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Downloaded image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to vmware_temp/ce33e390-0ce3-4ae6-bfd0-d96843984c6f/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1760.275213] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Caching image {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1760.275468] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Copying Virtual Disk [datastore1] vmware_temp/ce33e390-0ce3-4ae6-bfd0-d96843984c6f/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk to [datastore1] vmware_temp/ce33e390-0ce3-4ae6-bfd0-d96843984c6f/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk {{(pid=61649) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1760.275755] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-23dfd5e3-6c2f-436e-b03f-d6d9c8ca6332 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.286140] env[61649]: DEBUG oslo_vmware.api [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Waiting for the task: (returnval){ [ 1760.286140] env[61649]: value = "task-158270" [ 1760.286140] env[61649]: _type = "Task" [ 1760.286140] env[61649]: } to complete. 
{{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1760.295244] env[61649]: DEBUG oslo_vmware.api [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Task: {'id': task-158270, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.795796] env[61649]: DEBUG oslo_vmware.exceptions [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Fault InvalidArgument not matched. {{(pid=61649) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1760.796113] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1760.796664] env[61649]: ERROR nova.compute.manager [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1760.796664] env[61649]: Faults: ['InvalidArgument'] [ 1760.796664] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Traceback (most recent call last): [ 1760.796664] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1760.796664] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] yield resources [ 1760.796664] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1760.796664] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] self.driver.spawn(context, instance, image_meta, [ 1760.796664] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1760.796664] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1760.796664] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1760.796664] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] self._fetch_image_if_missing(context, vi) [ 1760.796664] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1760.797069] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] image_cache(vi, tmp_image_ds_loc) [ 1760.797069] env[61649]: ERROR nova.compute.manager [instance: 
aa39503a-2342-421e-928f-35ec7c8e47fb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1760.797069] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] vm_util.copy_virtual_disk( [ 1760.797069] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1760.797069] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] session._wait_for_task(vmdk_copy_task) [ 1760.797069] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1760.797069] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] return self.wait_for_task(task_ref) [ 1760.797069] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1760.797069] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] return evt.wait() [ 1760.797069] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1760.797069] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] result = hub.switch() [ 1760.797069] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1760.797069] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] return self.greenlet.switch() [ 1760.797457] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1760.797457] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] self.f(*self.args, **self.kw) [ 1760.797457] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1760.797457] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] raise exceptions.translate_fault(task_info.error) [ 1760.797457] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1760.797457] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Faults: ['InvalidArgument'] [ 1760.797457] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] [ 1760.797457] env[61649]: INFO nova.compute.manager [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Terminating instance [ 1760.798586] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1760.798797] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1760.799031] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fd3e6e1d-0819-41c9-bfd1-32f9fd46c52d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.801370] env[61649]: DEBUG nova.compute.manager [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1760.801561] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1760.802261] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5359519b-3ca2-42dd-8625-7601c4fc9ca8 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.808693] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Unregistering the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1760.808894] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-246a3904-f47b-44d4-a803-0386f2b51439 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.810872] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1760.811062] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61649) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1760.812082] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3288ca7-9e41-4b11-8c8e-8b3432335c6e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.816870] env[61649]: DEBUG oslo_vmware.api [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Waiting for the task: (returnval){ [ 1760.816870] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52041c90-ffa1-7412-d373-bf9f1d14adb8" [ 1760.816870] env[61649]: _type = "Task" [ 1760.816870] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1760.823449] env[61649]: DEBUG oslo_vmware.api [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52041c90-ffa1-7412-d373-bf9f1d14adb8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.880405] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Unregistered the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1760.883234] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Deleting contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1760.883234] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Deleting the datastore file [datastore1] aa39503a-2342-421e-928f-35ec7c8e47fb {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1760.883234] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b4f66b17-3693-4d90-b268-248e9fba3900 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.887757] env[61649]: DEBUG oslo_vmware.api [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Waiting for the task: (returnval){ [ 1760.887757] env[61649]: value = "task-158272" [ 1760.887757] env[61649]: _type = "Task" [ 1760.887757] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1760.894553] env[61649]: DEBUG oslo_vmware.api [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Task: {'id': task-158272, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1761.327327] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Preparing fetch location {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1761.327610] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Creating directory with path [datastore1] vmware_temp/7e3bbeaa-5b48-41e5-ab32-923a1b3ba348/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1761.327823] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-432e2bf1-9046-4e06-b5e3-b7c7888ba64a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.338523] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Created directory with path [datastore1] vmware_temp/7e3bbeaa-5b48-41e5-ab32-923a1b3ba348/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1761.338706] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Fetch image to [datastore1] vmware_temp/7e3bbeaa-5b48-41e5-ab32-923a1b3ba348/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1761.338872] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to [datastore1] vmware_temp/7e3bbeaa-5b48-41e5-ab32-923a1b3ba348/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1761.339602] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c256877-a303-46ef-b86d-e49b2e828911 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.345764] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd0861b9-bb81-455b-aad0-d0063faf2cb1 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.354214] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf1a472c-94d3-4cd8-ab75-c1f8d63b79de {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.384169] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-45029f8d-fe9b-45d7-80a7-27a352b56e0d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.392030] env[61649]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-75047647-d441-4d61-a95e-1b87579db3c0 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.396092] env[61649]: DEBUG oslo_vmware.api [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Task: {'id': task-158272, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074526} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1761.396583] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Deleted the datastore file {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1761.396767] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Deleted contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1761.396933] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1761.397108] env[61649]: INFO nova.compute.manager [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Took 0.60 seconds to destroy the instance on the hypervisor. 
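The unregister/delete sequence above follows from the CopyVirtualDisk_Task failure: "Fault InvalidArgument not matched" means oslo_vmware's get_fault_class found no dedicated exception class for that fault name, so the generic VimFaultException was raised with the fault names attached. A hedged sketch (an illustrative helper, not nova code) of how a caller sees that fault through oslo_vmware:

    from oslo_vmware import exceptions as vexc

    def wait_checking_fault(session, task_ref):
        # session: an oslo_vmware.api.VMwareAPISession. wait_for_task polls the
        # task and translates a task error into an exception, as in the
        # _poll_task traceback above.
        try:
            return session.wait_for_task(task_ref)
        except vexc.VimFaultException as e:
            # e.fault_list carries the raw fault names, e.g. ['InvalidArgument'];
            # here nova reacts by destroying the half-built VM and aborting its claim.
            print('task failed with faults: %s' % e.fault_list)
            raise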
[ 1761.399133] env[61649]: DEBUG nova.compute.claims [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Aborting claim: {{(pid=61649) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1761.399295] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1761.399542] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1761.401487] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg f57bb2dc3bc84ba5a9e95afaa030fe26 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1761.416062] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1761.433228] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f57bb2dc3bc84ba5a9e95afaa030fe26 [ 1761.466856] env[61649]: DEBUG oslo_vmware.rw_handles [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7e3bbeaa-5b48-41e5-ab32-923a1b3ba348/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1761.525691] env[61649]: DEBUG oslo_vmware.rw_handles [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Completed reading data from the image iterator. {{(pid=61649) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1761.525876] env[61649]: DEBUG oslo_vmware.rw_handles [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7e3bbeaa-5b48-41e5-ab32-923a1b3ba348/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61649) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1761.628811] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68230551-0e08-4637-ae78-62eeb0e6cc52 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.636092] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a38de52-2ab0-4726-8b41-f6af19aed8df {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.664190] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a47e695-1502-4284-a88b-871f5351a558 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.671171] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9f593ea-b17d-4dd4-9251-6ba3d12ed34a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.683268] env[61649]: DEBUG nova.compute.provider_tree [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1761.683736] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg 6d25cbf40d354cc0838c0905d72e10a0 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1761.691330] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6d25cbf40d354cc0838c0905d72e10a0 [ 1761.692221] env[61649]: DEBUG nova.scheduler.client.report [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1761.694438] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg 966ca0156ed64911adbfc772b51f52a5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1761.704767] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 966ca0156ed64911adbfc772b51f52a5 [ 1761.705419] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.306s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1761.705993] env[61649]: ERROR nova.compute.manager [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1761.705993] env[61649]: Faults: ['InvalidArgument'] [ 1761.705993] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Traceback (most recent call last): [ 1761.705993] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1761.705993] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] self.driver.spawn(context, instance, image_meta, [ 1761.705993] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1761.705993] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1761.705993] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1761.705993] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] self._fetch_image_if_missing(context, vi) [ 1761.705993] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1761.705993] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] image_cache(vi, tmp_image_ds_loc) [ 1761.705993] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1761.706557] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] vm_util.copy_virtual_disk( [ 1761.706557] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1761.706557] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] session._wait_for_task(vmdk_copy_task) [ 1761.706557] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1761.706557] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] return self.wait_for_task(task_ref) [ 1761.706557] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1761.706557] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] return evt.wait() [ 1761.706557] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1761.706557] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] result = hub.switch() [ 1761.706557] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1761.706557] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] return self.greenlet.switch() [ 1761.706557] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1761.706557] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] self.f(*self.args, **self.kw) [ 1761.707141] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1761.707141] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] raise exceptions.translate_fault(task_info.error) [ 1761.707141] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1761.707141] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Faults: ['InvalidArgument'] [ 1761.707141] env[61649]: ERROR nova.compute.manager [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] [ 1761.707141] env[61649]: DEBUG nova.compute.utils [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] VimFaultException {{(pid=61649) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1761.708039] env[61649]: DEBUG nova.compute.manager [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Build of instance aa39503a-2342-421e-928f-35ec7c8e47fb was re-scheduled: A specified parameter was not correct: fileType [ 1761.708039] env[61649]: Faults: ['InvalidArgument'] {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1761.708422] env[61649]: DEBUG nova.compute.manager [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Unplugging VIFs for instance {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1761.708595] env[61649]: DEBUG nova.compute.manager [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1761.708765] env[61649]: DEBUG nova.compute.manager [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1761.708928] env[61649]: DEBUG nova.network.neutron [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1761.998269] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg dbb6caa37ce748d78fef705d986956d5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1762.003543] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dbb6caa37ce748d78fef705d986956d5 [ 1762.004102] env[61649]: DEBUG nova.network.neutron [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1762.004575] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg 2ec71cfc4de8484c9e9441838fea1a73 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1762.020161] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2ec71cfc4de8484c9e9441838fea1a73 [ 1762.020804] env[61649]: INFO nova.compute.manager [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Took 0.31 seconds to deallocate network for instance. 
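The traceback above bottoms out in oslo.vmware's task poller: _poll_task re-reads the vCenter task state and, when the task ends in error, raises the fault translated by exceptions.translate_fault(), which is how the CopyVirtualDisk_Task's InvalidArgument fault surfaces in _build_and_run_instance as VimFaultException. A minimal sketch of that polling contract, using illustrative stand-ins (FakeTaskInfo, poll_until_done) rather than the real oslo.vmware classes:

    import time

    class VimFaultException(Exception):
        # Stand-in for oslo_vmware.exceptions.VimFaultException.
        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list

    class FakeTaskInfo:
        # Illustrative snapshot of a vCenter TaskInfo object.
        def __init__(self, state, error=None):
            self.state = state  # 'queued' | 'running' | 'success' | 'error'
            self.error = error  # fault message when state == 'error'

    def poll_until_done(get_task_info, interval=0.5):
        # Poll until the task leaves its running states, roughly what the
        # loopingcall-driven _poll_task in the traceback does.
        while True:
            info = get_task_info()
            if info.state == 'success':
                return info
            if info.state == 'error':
                # translate_fault() turns the TaskInfo error into a typed
                # exception; an InvalidArgument fault maps to VimFaultException.
                raise VimFaultException(['InvalidArgument'], info.error)
            time.sleep(interval)

    # The CopyVirtualDisk_Task behind _cache_sparse_image fails server-side:
    states = iter([FakeTaskInfo('running'),
                   FakeTaskInfo('error', 'A specified parameter was not '
                                         'correct: fileType')])
    try:
        poll_until_done(lambda: next(states), interval=0)
    except VimFaultException as exc:
        print(exc.fault_list, '-', exc)

Because the fault is raised inside the driver's spawn path, the compute manager aborts the resource claim and re-schedules the build, which is exactly the sequence of entries that follows.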
[ 1762.022596] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg 432d78b0184e4e6884e7a516b9566c5c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1762.052876] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 432d78b0184e4e6884e7a516b9566c5c [ 1762.055597] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg 0184950753124798a147ba8f6493c879 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1762.087355] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0184950753124798a147ba8f6493c879 [ 1762.105765] env[61649]: INFO nova.scheduler.client.report [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Deleted allocations for instance aa39503a-2342-421e-928f-35ec7c8e47fb [ 1762.111747] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg a405978fa30d461fa21ad06fc36bc23f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1762.121369] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a405978fa30d461fa21ad06fc36bc23f [ 1762.121886] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7bd51398-a0d4-43aa-8451-0275c0fea031 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Lock "aa39503a-2342-421e-928f-35ec7c8e47fb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 577.131s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1762.122393] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg 1de45970914a4a04aa3bb0b552de2c2c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1762.123231] env[61649]: DEBUG oslo_concurrency.lockutils [None req-ea766373-24af-46c1-8af8-40ba6cb745fc tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Lock "aa39503a-2342-421e-928f-35ec7c8e47fb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 380.798s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1762.123295] env[61649]: DEBUG oslo_concurrency.lockutils [None req-ea766373-24af-46c1-8af8-40ba6cb745fc tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Acquiring lock "aa39503a-2342-421e-928f-35ec7c8e47fb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1762.123653] env[61649]: DEBUG oslo_concurrency.lockutils [None req-ea766373-24af-46c1-8af8-40ba6cb745fc tempest-AttachVolumeNegativeTest-227144255 
tempest-AttachVolumeNegativeTest-227144255-project-member] Lock "aa39503a-2342-421e-928f-35ec7c8e47fb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1762.123838] env[61649]: DEBUG oslo_concurrency.lockutils [None req-ea766373-24af-46c1-8af8-40ba6cb745fc tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Lock "aa39503a-2342-421e-928f-35ec7c8e47fb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1762.125771] env[61649]: INFO nova.compute.manager [None req-ea766373-24af-46c1-8af8-40ba6cb745fc tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Terminating instance [ 1762.127624] env[61649]: DEBUG nova.compute.manager [None req-ea766373-24af-46c1-8af8-40ba6cb745fc tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1762.127817] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-ea766373-24af-46c1-8af8-40ba6cb745fc tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1762.128373] env[61649]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5f14eed8-345d-487a-a8dd-60eae2cac733 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.138223] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-397dfad9-76c9-4d53-a00a-8e231259decb {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.149472] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1de45970914a4a04aa3bb0b552de2c2c [ 1762.149472] env[61649]: DEBUG nova.compute.manager [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1762.150806] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg e9c8d1c98ab14d66a039f19f3785d6a5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1762.168288] env[61649]: WARNING nova.virt.vmwareapi.vmops [None req-ea766373-24af-46c1-8af8-40ba6cb745fc tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance aa39503a-2342-421e-928f-35ec7c8e47fb could not be found. 
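The lock bookkeeping in these entries ("acquired ... waited 380.798s", "released ... held 577.131s") is oslo.concurrency's named-lock pattern: the compute manager serializes operations on the instance UUID, so the terminate request queued behind the failed build for its entire duration. A rough stand-in for that pattern (timed_lock is illustrative, not the lockutils API):

    import threading
    import time
    from contextlib import contextmanager

    _locks = {}

    @contextmanager
    def timed_lock(name, by):
        # Named lock that reports wait/hold times in the log's format.
        lock = _locks.setdefault(name, threading.Lock())
        t0 = time.monotonic()
        with lock:
            waited = time.monotonic() - t0
            print(f'Lock "{name}" acquired by "{by}" :: waited {waited:.3f}s')
            t1 = time.monotonic()
            try:
                yield
            finally:
                held = time.monotonic() - t1
                print(f'Lock "{name}" "released" by "{by}" :: held {held:.3f}s')

    # Terminate serializes on the instance UUID, so it queues behind the build:
    with timed_lock('aa39503a-2342-421e-928f-35ec7c8e47fb',
                    'do_terminate_instance'):
        pass

The InstanceNotFound warning that closes the span is benign here: the failed build never created a VM on the vCenter backend, so destroy simply treats the missing VM as already gone.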
[ 1762.168397] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-ea766373-24af-46c1-8af8-40ba6cb745fc tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1762.168579] env[61649]: INFO nova.compute.manager [None req-ea766373-24af-46c1-8af8-40ba6cb745fc tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1762.168823] env[61649]: DEBUG oslo.service.loopingcall [None req-ea766373-24af-46c1-8af8-40ba6cb745fc tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1762.169047] env[61649]: DEBUG nova.compute.manager [-] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1762.169143] env[61649]: DEBUG nova.network.neutron [-] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1762.178640] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e9c8d1c98ab14d66a039f19f3785d6a5 [ 1762.186667] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg a104078c26c24583b36109c400b906c3 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1762.192055] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a104078c26c24583b36109c400b906c3 [ 1762.192382] env[61649]: DEBUG nova.network.neutron [-] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1762.192744] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 8e186f2597164f58b9f014d69d4db777 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1762.195369] env[61649]: DEBUG oslo_concurrency.lockutils [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1762.195369] env[61649]: DEBUG oslo_concurrency.lockutils [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1762.196118] env[61649]: INFO nova.compute.claims [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1762.197596] env[61649]: 
INFO oslo_messaging._drivers.amqpdriver [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg f17bb89742b74012939d372ffd00e656 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1762.200662] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8e186f2597164f58b9f014d69d4db777 [ 1762.201180] env[61649]: INFO nova.compute.manager [-] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] Took 0.03 seconds to deallocate network for instance. [ 1762.204290] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-ea766373-24af-46c1-8af8-40ba6cb745fc tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg 36fd79c5852343d895002cf406f805dd in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1762.232985] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f17bb89742b74012939d372ffd00e656 [ 1762.233419] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 36fd79c5852343d895002cf406f805dd [ 1762.235115] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg 8063f56c953d47e3bdb541379428cb96 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1762.241526] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8063f56c953d47e3bdb541379428cb96 [ 1762.250803] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-ea766373-24af-46c1-8af8-40ba6cb745fc tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg d98bf8da443c44d39c3b4b43ca4577b3 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1762.285349] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d98bf8da443c44d39c3b4b43ca4577b3 [ 1762.288133] env[61649]: DEBUG oslo_concurrency.lockutils [None req-ea766373-24af-46c1-8af8-40ba6cb745fc tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Lock "aa39503a-2342-421e-928f-35ec7c8e47fb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.165s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1762.288459] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-ea766373-24af-46c1-8af8-40ba6cb745fc tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg 7f384187c73a448db29ddc36b1ad4a00 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1762.289680] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "aa39503a-2342-421e-928f-35ec7c8e47fb" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 24.270s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1762.289864] env[61649]: INFO nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: aa39503a-2342-421e-928f-35ec7c8e47fb] During sync_power_state the instance has a pending task (deleting). Skip. 
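The repeated "Inventory has not changed" entries carry the provider's full capacity record. Placement derives schedulable capacity per resource class as (total - reserved) * allocation_ratio, so the figures logged for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 work out as follows (a quick arithmetic check, not Nova code):

    # Capacity per resource class: (total - reserved) * allocation_ratio.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f'{rc}: {capacity:g} allocatable')
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400

With 48 cores and a 4.0 allocation ratio the node can claim up to 192 vCPUs, which is why the single-vCPU claim above succeeded immediately.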
[ 1762.290028] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "aa39503a-2342-421e-928f-35ec7c8e47fb" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1762.298484] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7f384187c73a448db29ddc36b1ad4a00 [ 1762.390999] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4ff8fbf-7fbc-46a4-9d2c-89e0dd60e220 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.398484] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d95ad2d-116d-451c-9897-c64d0809ecd1 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.430954] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e92e328-f53c-4d0f-b800-cdeaeec2a685 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.437990] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18f46d34-fc7a-48ef-a4bf-49e48e1145d7 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.451178] env[61649]: DEBUG nova.compute.provider_tree [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1762.451711] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg fc7029dc780c47d9ae5308dcc04afe03 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1762.459008] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fc7029dc780c47d9ae5308dcc04afe03 [ 1762.459898] env[61649]: DEBUG nova.scheduler.client.report [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1762.462153] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg 8e5502e1b89d4c8fbfc30fb86a5189e5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1762.474791] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC 
response for msg 8e5502e1b89d4c8fbfc30fb86a5189e5 [ 1762.475502] env[61649]: DEBUG oslo_concurrency.lockutils [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.281s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1762.475958] env[61649]: DEBUG nova.compute.manager [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Start building networks asynchronously for instance. {{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1762.477757] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg 5a2c1a667099463c89bd157973e41234 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1762.504849] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5a2c1a667099463c89bd157973e41234 [ 1762.505959] env[61649]: DEBUG nova.compute.utils [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Using /dev/sd instead of None {{(pid=61649) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1762.506520] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg 0decda98575a47c8ae12b76319288109 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1762.507735] env[61649]: DEBUG nova.compute.manager [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Allocating IP information in the background. {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1762.507931] env[61649]: DEBUG nova.network.neutron [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] allocate_for_instance() {{(pid=61649) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1762.516065] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0decda98575a47c8ae12b76319288109 [ 1762.516065] env[61649]: DEBUG nova.compute.manager [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Start building block device mappings for instance. 
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1762.516459] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg d8029c13647e4e44a370e8479f9fe7a0 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1762.542159] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d8029c13647e4e44a370e8479f9fe7a0 [ 1762.544648] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg fc7fb25691e442659b6b058e7909023c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1762.551688] env[61649]: DEBUG nova.policy [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ed97b90b998c477eae669b6132359808', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '911980132e374bbd9e861e2fba9466f8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61649) authorize /opt/stack/nova/nova/policy.py:203}} [ 1762.571916] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fc7fb25691e442659b6b058e7909023c [ 1762.572906] env[61649]: DEBUG nova.compute.manager [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Start spawning the instance on the hypervisor. 
{{(pid=61649) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1762.594207] env[61649]: DEBUG nova.virt.hardware [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-04-02T10:03:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-04-02T10:03:42Z,direct_url=,disk_format='vmdk',id=d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d9c1bd4c77004c3cb8e42232cad1896c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-04-02T10:03:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1762.594443] env[61649]: DEBUG nova.virt.hardware [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Flavor limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1762.594598] env[61649]: DEBUG nova.virt.hardware [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Image limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1762.594777] env[61649]: DEBUG nova.virt.hardware [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Flavor pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1762.594923] env[61649]: DEBUG nova.virt.hardware [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Image pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1762.595070] env[61649]: DEBUG nova.virt.hardware [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1762.595272] env[61649]: DEBUG nova.virt.hardware [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1762.595509] env[61649]: DEBUG nova.virt.hardware [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1762.595710] env[61649]: DEBUG nova.virt.hardware [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 
tempest-ImagesTestJSON-1732047265-project-member] Got 1 possible topologies {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1762.595879] env[61649]: DEBUG nova.virt.hardware [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1762.596107] env[61649]: DEBUG nova.virt.hardware [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1762.596953] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b52177c1-3188-4dae-9288-80220f5d771d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.604884] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1370ced4-e3f9-4f1f-b161-e9d8acf54083 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.784203] env[61649]: DEBUG nova.network.neutron [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Successfully created port: 232bd51f-f217-4989-b4a1-7cc37fb017f7 {{(pid=61649) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1763.378125] env[61649]: DEBUG nova.network.neutron [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Successfully updated port: 232bd51f-f217-4989-b4a1-7cc37fb017f7 {{(pid=61649) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1763.378607] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg b278f0a28d17407f9d183cff533166ff in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1763.388547] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b278f0a28d17407f9d183cff533166ff [ 1763.389300] env[61649]: DEBUG oslo_concurrency.lockutils [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Acquiring lock "refresh_cache-4e47e82d-780e-4c23-8071-083beab2a53f" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1763.389395] env[61649]: DEBUG oslo_concurrency.lockutils [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Acquired lock "refresh_cache-4e47e82d-780e-4c23-8071-083beab2a53f" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1763.389577] env[61649]: DEBUG nova.network.neutron [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Building network 
info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1763.389986] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg 7a14724a94e945d0876371ddf17f2bc9 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1763.396871] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7a14724a94e945d0876371ddf17f2bc9 [ 1763.425635] env[61649]: DEBUG nova.network.neutron [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Instance cache missing network info. {{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1763.601714] env[61649]: DEBUG nova.network.neutron [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Updating instance_info_cache with network_info: [{"id": "232bd51f-f217-4989-b4a1-7cc37fb017f7", "address": "fa:16:3e:4c:e3:bc", "network": {"id": "cf910fb5-25a3-4ac3-81ee-4f21af78b736", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1804430108-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "911980132e374bbd9e861e2fba9466f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap232bd51f-f2", "ovs_interfaceid": "232bd51f-f217-4989-b4a1-7cc37fb017f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1763.602270] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg 6246b8342d2046a491c439a8e71fb799 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1763.614235] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6246b8342d2046a491c439a8e71fb799 [ 1763.614798] env[61649]: DEBUG oslo_concurrency.lockutils [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Releasing lock "refresh_cache-4e47e82d-780e-4c23-8071-083beab2a53f" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1763.615073] env[61649]: DEBUG nova.compute.manager [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Instance network_info: |[{"id": 
"232bd51f-f217-4989-b4a1-7cc37fb017f7", "address": "fa:16:3e:4c:e3:bc", "network": {"id": "cf910fb5-25a3-4ac3-81ee-4f21af78b736", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1804430108-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "911980132e374bbd9e861e2fba9466f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap232bd51f-f2", "ovs_interfaceid": "232bd51f-f217-4989-b4a1-7cc37fb017f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1763.615458] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4c:e3:bc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9aa05ef8-c7bb-4af5-983f-bfa0f3f88223', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '232bd51f-f217-4989-b4a1-7cc37fb017f7', 'vif_model': 'vmxnet3'}] {{(pid=61649) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1763.622918] env[61649]: DEBUG oslo.service.loopingcall [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1763.623356] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Creating VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1763.623575] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ea937fbf-c235-4936-89d2-888a9d2911c2 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.643792] env[61649]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1763.643792] env[61649]: value = "task-158273" [ 1763.643792] env[61649]: _type = "Task" [ 1763.643792] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1763.651459] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158273, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1764.043581] env[61649]: DEBUG nova.compute.manager [req-ce6583c1-df7c-4699-98f7-d6f1e56b3a8e req-5d7cfb18-8f18-4d7e-ac42-d6b53036f1dd service nova] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Received event network-vif-plugged-232bd51f-f217-4989-b4a1-7cc37fb017f7 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1764.043828] env[61649]: DEBUG oslo_concurrency.lockutils [req-ce6583c1-df7c-4699-98f7-d6f1e56b3a8e req-5d7cfb18-8f18-4d7e-ac42-d6b53036f1dd service nova] Acquiring lock "4e47e82d-780e-4c23-8071-083beab2a53f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1764.044013] env[61649]: DEBUG oslo_concurrency.lockutils [req-ce6583c1-df7c-4699-98f7-d6f1e56b3a8e req-5d7cfb18-8f18-4d7e-ac42-d6b53036f1dd service nova] Lock "4e47e82d-780e-4c23-8071-083beab2a53f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1764.044235] env[61649]: DEBUG oslo_concurrency.lockutils [req-ce6583c1-df7c-4699-98f7-d6f1e56b3a8e req-5d7cfb18-8f18-4d7e-ac42-d6b53036f1dd service nova] Lock "4e47e82d-780e-4c23-8071-083beab2a53f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1764.044403] env[61649]: DEBUG nova.compute.manager [req-ce6583c1-df7c-4699-98f7-d6f1e56b3a8e req-5d7cfb18-8f18-4d7e-ac42-d6b53036f1dd service nova] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] No waiting events found dispatching network-vif-plugged-232bd51f-f217-4989-b4a1-7cc37fb017f7 {{(pid=61649) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1764.044569] env[61649]: WARNING nova.compute.manager [req-ce6583c1-df7c-4699-98f7-d6f1e56b3a8e req-5d7cfb18-8f18-4d7e-ac42-d6b53036f1dd service nova] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Received unexpected event network-vif-plugged-232bd51f-f217-4989-b4a1-7cc37fb017f7 for instance with vm_state building and task_state spawning. [ 1764.044726] env[61649]: DEBUG nova.compute.manager [req-ce6583c1-df7c-4699-98f7-d6f1e56b3a8e req-5d7cfb18-8f18-4d7e-ac42-d6b53036f1dd service nova] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Received event network-changed-232bd51f-f217-4989-b4a1-7cc37fb017f7 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1764.044877] env[61649]: DEBUG nova.compute.manager [req-ce6583c1-df7c-4699-98f7-d6f1e56b3a8e req-5d7cfb18-8f18-4d7e-ac42-d6b53036f1dd service nova] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Refreshing instance network info cache due to event network-changed-232bd51f-f217-4989-b4a1-7cc37fb017f7. 
{{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 1764.045057] env[61649]: DEBUG oslo_concurrency.lockutils [req-ce6583c1-df7c-4699-98f7-d6f1e56b3a8e req-5d7cfb18-8f18-4d7e-ac42-d6b53036f1dd service nova] Acquiring lock "refresh_cache-4e47e82d-780e-4c23-8071-083beab2a53f" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1764.045189] env[61649]: DEBUG oslo_concurrency.lockutils [req-ce6583c1-df7c-4699-98f7-d6f1e56b3a8e req-5d7cfb18-8f18-4d7e-ac42-d6b53036f1dd service nova] Acquired lock "refresh_cache-4e47e82d-780e-4c23-8071-083beab2a53f" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1764.045339] env[61649]: DEBUG nova.network.neutron [req-ce6583c1-df7c-4699-98f7-d6f1e56b3a8e req-5d7cfb18-8f18-4d7e-ac42-d6b53036f1dd service nova] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Refreshing network info cache for port 232bd51f-f217-4989-b4a1-7cc37fb017f7 {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 1764.045799] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-ce6583c1-df7c-4699-98f7-d6f1e56b3a8e req-5d7cfb18-8f18-4d7e-ac42-d6b53036f1dd service nova] Expecting reply to msg 8b4504c6f66e40e6a1e0d5fa18d86e98 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1764.052690] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8b4504c6f66e40e6a1e0d5fa18d86e98 [ 1764.154325] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158273, 'name': CreateVM_Task, 'duration_secs': 0.269856} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1764.154477] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Created VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1764.155115] env[61649]: DEBUG oslo_concurrency.lockutils [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1764.155264] env[61649]: DEBUG oslo_concurrency.lockutils [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1764.155581] env[61649]: DEBUG oslo_concurrency.lockutils [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1764.155808] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5acb988f-beda-42f8-8311-2a754e29e662 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.159810] env[61649]: DEBUG oslo_vmware.api [None req-706189fe-cd31-48ad-b224-2068d4ef4748 
tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Waiting for the task: (returnval){
[ 1764.159810] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52a89853-c698-b0ca-4374-233084a0dc1b"
[ 1764.159810] env[61649]: _type = "Task"
[ 1764.159810] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1764.166736] env[61649]: DEBUG oslo_vmware.api [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52a89853-c698-b0ca-4374-233084a0dc1b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1764.298613] env[61649]: DEBUG nova.network.neutron [req-ce6583c1-df7c-4699-98f7-d6f1e56b3a8e req-5d7cfb18-8f18-4d7e-ac42-d6b53036f1dd service nova] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Updated VIF entry in instance network info cache for port 232bd51f-f217-4989-b4a1-7cc37fb017f7. {{(pid=61649) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}}
[ 1764.298967] env[61649]: DEBUG nova.network.neutron [req-ce6583c1-df7c-4699-98f7-d6f1e56b3a8e req-5d7cfb18-8f18-4d7e-ac42-d6b53036f1dd service nova] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Updating instance_info_cache with network_info: [{"id": "232bd51f-f217-4989-b4a1-7cc37fb017f7", "address": "fa:16:3e:4c:e3:bc", "network": {"id": "cf910fb5-25a3-4ac3-81ee-4f21af78b736", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1804430108-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "911980132e374bbd9e861e2fba9466f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap232bd51f-f2", "ovs_interfaceid": "232bd51f-f217-4989-b4a1-7cc37fb017f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1764.299481] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-ce6583c1-df7c-4699-98f7-d6f1e56b3a8e req-5d7cfb18-8f18-4d7e-ac42-d6b53036f1dd service nova] Expecting reply to msg 2e1f740e68df44a387537c8268a35c32 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1764.308801] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2e1f740e68df44a387537c8268a35c32
[ 1764.308801] env[61649]: DEBUG oslo_concurrency.lockutils [req-ce6583c1-df7c-4699-98f7-d6f1e56b3a8e req-5d7cfb18-8f18-4d7e-ac42-d6b53036f1dd service nova] Releasing lock "refresh_cache-4e47e82d-780e-4c23-8071-083beab2a53f" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1764.670189] env[61649]: DEBUG oslo_concurrency.lockutils [None
req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1764.670638] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Processing image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1764.670716] env[61649]: DEBUG oslo_concurrency.lockutils [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1775.548544] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8067b296-5bf9-4771-b484-6611cb3938e5 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg 3d3b21c839a54073b732c858785f9867 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1775.557294] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3d3b21c839a54073b732c858785f9867 [ 1775.557758] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8067b296-5bf9-4771-b484-6611cb3938e5 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Acquiring lock "28a3b287-8717-42d5-989a-4f66642134f7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1795.940019] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1796.929804] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1796.929973] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61649) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 1798.929738] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1799.928573] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1800.924921] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1801.929693] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1801.929950] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Starting heal instance info cache {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 1801.930037] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Rebuilding the list of instances to heal {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 1801.931063] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 7896835b3ebf44efafdb3087778984c4 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1801.947676] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7896835b3ebf44efafdb3087778984c4 [ 1801.949928] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1801.950070] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: b6243867-9546-4663-9d48-5c040537490b] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1801.950196] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1801.950317] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Skipping network cache update for instance because it is Building. 
{{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1801.950434] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1801.950577] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1801.950711] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1801.950827] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1801.950941] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1801.951051] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1801.951165] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Didn't find any instances for network info cache update. 
{{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 1802.928947] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1804.928688] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1804.929076] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1804.929228] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 6ca1caf201984e4e94310caf50bfede0 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1804.937686] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6ca1caf201984e4e94310caf50bfede0 [ 1804.938673] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1804.938881] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1804.939042] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1804.939212] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61649) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1804.940341] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7144bbd-1d6f-4515-abe4-ba8feb7520d8 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.948999] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47287fcc-1cf7-4618-81a5-0e33734b07ac {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.963389] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7db4c65-aa63-403e-b9d6-ccecab0cc443 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.969267] 
env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b45d2d95-b50b-4157-bf31-7b38f72bcf18 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.997033] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181824MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61649) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1804.997174] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1804.997355] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1804.998105] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg f769470142944545bdf06b38b5071a90 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1805.030386] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f769470142944545bdf06b38b5071a90 [ 1805.034347] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 34d10e323d2841909223b0e1c4e652df in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1805.043103] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 34d10e323d2841909223b0e1c4e652df [ 1805.086016] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance d6e8f17f-40c4-46e0-a900-d92d1da01ed8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1805.086174] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance b6243867-9546-4663-9d48-5c040537490b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1805.086301] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 4b87e74a-2408-466f-b1c2-68330c31fb9d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1805.086424] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance ff225293-ad72-499a-9b5b-147d0bc40350 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1805.086546] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 5f424618-f9b3-4e9a-898c-2d1a07476cc7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1805.086667] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 4661732c-51dc-4a77-aa32-28049dbd5ad7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1805.086783] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 545947a4-3f1a-44fe-ac02-ec5e2e5844d5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1805.086899] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1805.087393] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 28a3b287-8717-42d5-989a-4f66642134f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1805.087565] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 4e47e82d-780e-4c23-8071-083beab2a53f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1805.088230] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg e0f34f542dbd480e874f3fbda85d4a1e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1805.097903] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e0f34f542dbd480e874f3fbda85d4a1e [ 1805.098618] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 8b1cd843-48b0-4e85-93fa-32ddd6e32883 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1805.099090] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 36c4b8059fb74001a8bfb76884e75574 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1805.109190] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 36c4b8059fb74001a8bfb76884e75574 [ 1805.109881] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance d8503feb-d1df-4e1f-8357-e080e8bdb174 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1805.110097] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1805.110344] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1805.126851] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Refreshing inventories for resource provider dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 1805.140352] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Updating ProviderTree inventory for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 1805.140535] env[61649]: DEBUG nova.compute.provider_tree 
[None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Updating inventory in ProviderTree for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1805.149972] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Refreshing aggregate associations for resource provider dad32f24-3843-462d-a3f9-4ef2a60037c4, aggregates: None {{(pid=61649) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 1805.165699] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Refreshing trait associations for resource provider dad32f24-3843-462d-a3f9-4ef2a60037c4, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=61649) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 1805.302900] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69c07cc3-f1aa-49c4-b761-fc019d37f130 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.310341] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f1d248b-9b86-4579-9915-593849e4e5c2 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.339630] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e8c3d7c-4cb3-4817-a970-706c2c0b0d92 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.346423] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38356026-19ad-4d3f-82c3-470d87432dcc {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.359129] env[61649]: DEBUG nova.compute.provider_tree [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1805.359577] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg d44dcdc1ff064adab77459b0b50eeb5d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1805.366614] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d44dcdc1ff064adab77459b0b50eeb5d [ 1805.367460] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 
'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1805.369633] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg a2093576cd98432a8943c725aebb6c79 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1805.379819] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a2093576cd98432a8943c725aebb6c79 [ 1805.380479] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61649) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1805.380684] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.383s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1811.189426] env[61649]: WARNING oslo_vmware.rw_handles [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1811.189426] env[61649]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1811.189426] env[61649]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1811.189426] env[61649]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1811.189426] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1811.189426] env[61649]: ERROR oslo_vmware.rw_handles response.begin() [ 1811.189426] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1811.189426] env[61649]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1811.189426] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1811.189426] env[61649]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1811.189426] env[61649]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1811.189426] env[61649]: ERROR oslo_vmware.rw_handles [ 1811.190278] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Downloaded image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to vmware_temp/7e3bbeaa-5b48-41e5-ab32-923a1b3ba348/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1811.192126] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Caching image 
{{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1811.192395] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Copying Virtual Disk [datastore1] vmware_temp/7e3bbeaa-5b48-41e5-ab32-923a1b3ba348/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk to [datastore1] vmware_temp/7e3bbeaa-5b48-41e5-ab32-923a1b3ba348/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk {{(pid=61649) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1811.192682] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-215da450-8958-4a7b-9630-0e2ec5b51ae6 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.201806] env[61649]: DEBUG oslo_vmware.api [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Waiting for the task: (returnval){ [ 1811.201806] env[61649]: value = "task-158274" [ 1811.201806] env[61649]: _type = "Task" [ 1811.201806] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1811.210755] env[61649]: DEBUG oslo_vmware.api [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Task: {'id': task-158274, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.714206] env[61649]: DEBUG oslo_vmware.exceptions [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Fault InvalidArgument not matched. 
{{(pid=61649) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1811.714467] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1811.715019] env[61649]: ERROR nova.compute.manager [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1811.715019] env[61649]: Faults: ['InvalidArgument'] [ 1811.715019] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Traceback (most recent call last): [ 1811.715019] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1811.715019] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] yield resources [ 1811.715019] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1811.715019] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] self.driver.spawn(context, instance, image_meta, [ 1811.715019] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1811.715019] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1811.715019] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1811.715019] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] self._fetch_image_if_missing(context, vi) [ 1811.715019] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1811.715408] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] image_cache(vi, tmp_image_ds_loc) [ 1811.715408] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1811.715408] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] vm_util.copy_virtual_disk( [ 1811.715408] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1811.715408] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] session._wait_for_task(vmdk_copy_task) [ 1811.715408] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1811.715408] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] return self.wait_for_task(task_ref) [ 1811.715408] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1811.715408] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] return evt.wait() [ 1811.715408] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1811.715408] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] result = hub.switch() [ 1811.715408] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1811.715408] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] return self.greenlet.switch() [ 1811.715786] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1811.715786] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] self.f(*self.args, **self.kw) [ 1811.715786] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1811.715786] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] raise exceptions.translate_fault(task_info.error) [ 1811.715786] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1811.715786] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Faults: ['InvalidArgument'] [ 1811.715786] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] [ 1811.715786] env[61649]: INFO nova.compute.manager [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Terminating instance [ 1811.716957] env[61649]: DEBUG oslo_concurrency.lockutils [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1811.717165] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1811.717421] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2ff9679c-814d-478a-958f-e5698d0281a1 {{(pid=61649) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.719661] env[61649]: DEBUG nova.compute.manager [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1811.719858] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1811.720630] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29e0fb1d-8eac-43a1-89b9-aea7cd7765be {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.727914] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Unregistering the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1811.728152] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1a881c7f-44be-44df-bed9-3dca4a60b785 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.730441] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1811.730611] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61649) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1811.731556] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b167d72-21d8-429a-b5c0-14fb4c47a0af {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.736152] env[61649]: DEBUG oslo_vmware.api [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Waiting for the task: (returnval){ [ 1811.736152] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52e29b27-403a-9b6f-d4a0-c0e9467564cf" [ 1811.736152] env[61649]: _type = "Task" [ 1811.736152] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1811.745264] env[61649]: DEBUG oslo_vmware.api [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52e29b27-403a-9b6f-d4a0-c0e9467564cf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.802694] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Unregistered the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1811.802970] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Deleting contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1811.803094] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Deleting the datastore file [datastore1] d6e8f17f-40c4-46e0-a900-d92d1da01ed8 {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1811.803373] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-367d3a21-7c29-489c-967e-afc963c72b36 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.810306] env[61649]: DEBUG oslo_vmware.api [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Waiting for the task: (returnval){ [ 1811.810306] env[61649]: value = "task-158276" [ 1811.810306] env[61649]: _type = "Task" [ 1811.810306] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1811.817945] env[61649]: DEBUG oslo_vmware.api [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Task: {'id': task-158276, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.246725] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] [instance: b6243867-9546-4663-9d48-5c040537490b] Preparing fetch location {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1812.247096] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Creating directory with path [datastore1] vmware_temp/7f7d9600-68a0-4169-8f23-e9e164d8c277/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1812.247171] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-04c2bf55-ab5a-4cd3-8993-db3e8dbb3fe3 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.258192] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Created directory with path [datastore1] vmware_temp/7f7d9600-68a0-4169-8f23-e9e164d8c277/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1812.258377] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] [instance: b6243867-9546-4663-9d48-5c040537490b] Fetch image to [datastore1] vmware_temp/7f7d9600-68a0-4169-8f23-e9e164d8c277/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1812.258536] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] [instance: b6243867-9546-4663-9d48-5c040537490b] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to [datastore1] vmware_temp/7f7d9600-68a0-4169-8f23-e9e164d8c277/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1812.259228] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d4de311-690e-47da-9d35-fd251a553a60 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.265378] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14061121-1287-41ea-bcec-e354f01324f9 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.274329] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e397601-31a3-41e7-80e1-54870b15e9e3 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.305115] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9f2f5b7-bffd-4a5e-bce1-f3a327f1579b {{(pid=61649) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.313455] env[61649]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-113724f8-6666-499a-bca5-8cf1e04b80b6 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.319748] env[61649]: DEBUG oslo_vmware.api [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Task: {'id': task-158276, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07821} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1812.319996] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Deleted the datastore file {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1812.320193] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Deleted contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1812.320365] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1812.320556] env[61649]: INFO nova.compute.manager [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Took 0.60 seconds to destroy the instance on the hypervisor. 
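The DeleteDatastoreFile_Task records above follow the oslo.vmware pattern that recurs throughout this log: a vCenter *_Task method is invoked through the session, a task reference comes back immediately, and wait_for_task() then polls it (the "progress is 0%" lines) until it completes or raises a translated fault. A minimal sketch of that pattern in Python, assuming a reachable vCenter; the credentials and datastore path below are placeholders, not values from this log:

    from oslo_vmware import api as vmware_api

    # Placeholder connection details -- not taken from this log.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Invoking a *_Task method returns a task reference immediately,
    # mirroring the "Invoking FileManager.DeleteDatastoreFile_Task" record.
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task',
        session.vim.service_content.fileManager,
        name='[datastore1] example/path.vmdk',  # placeholder path
        datacenter=None)

    # wait_for_task() polls the server-side task state (the "progress is 0%"
    # records) and raises a translated exception such as VimFaultException
    # on failure, as seen with CopyVirtualDisk_Task earlier in this log.
    session.wait_for_task(task)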
[ 1812.322641] env[61649]: DEBUG nova.compute.claims [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Aborting claim: {{(pid=61649) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1812.322811] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1812.323023] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1812.324887] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Expecting reply to msg ec6b197ebce049129bac7f440c655f65 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1812.335120] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] [instance: b6243867-9546-4663-9d48-5c040537490b] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1812.356473] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ec6b197ebce049129bac7f440c655f65 [ 1812.481671] env[61649]: DEBUG oslo_vmware.rw_handles [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7f7d9600-68a0-4169-8f23-e9e164d8c277/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1812.541032] env[61649]: DEBUG oslo_vmware.rw_handles [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Completed reading data from the image iterator. {{(pid=61649) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1812.541231] env[61649]: DEBUG oslo_vmware.rw_handles [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7f7d9600-68a0-4169-8f23-e9e164d8c277/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61649) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1812.550741] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cd6e216-b71c-43e1-91f6-73dcb87928c0 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.560200] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e3cf90c-7f6e-4a1a-aa47-8b1c5ec1f28f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.603279] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad083847-d430-4c8c-aab7-d8a4c177135d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.611045] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d022cfd2-a07e-48ea-a4d8-173b44b392cb {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.625108] env[61649]: DEBUG nova.compute.provider_tree [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1812.625574] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Expecting reply to msg 0baf304cf7a743bd9a15cfbbbf9f7424 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1812.633085] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0baf304cf7a743bd9a15cfbbbf9f7424 [ 1812.633943] env[61649]: DEBUG nova.scheduler.client.report [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1812.636159] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Expecting reply to msg c9391088c2b74f0ab03904d408aa6a44 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1812.649076] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c9391088c2b74f0ab03904d408aa6a44 [ 1812.649770] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: 
held 0.327s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1812.650329] env[61649]: ERROR nova.compute.manager [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1812.650329] env[61649]: Faults: ['InvalidArgument'] [ 1812.650329] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Traceback (most recent call last): [ 1812.650329] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1812.650329] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] self.driver.spawn(context, instance, image_meta, [ 1812.650329] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1812.650329] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1812.650329] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1812.650329] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] self._fetch_image_if_missing(context, vi) [ 1812.650329] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1812.650329] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] image_cache(vi, tmp_image_ds_loc) [ 1812.650329] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1812.650723] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] vm_util.copy_virtual_disk( [ 1812.650723] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1812.650723] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] session._wait_for_task(vmdk_copy_task) [ 1812.650723] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1812.650723] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] return self.wait_for_task(task_ref) [ 1812.650723] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1812.650723] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] return evt.wait() [ 1812.650723] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1812.650723] env[61649]: ERROR 
nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] result = hub.switch() [ 1812.650723] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1812.650723] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] return self.greenlet.switch() [ 1812.650723] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1812.650723] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] self.f(*self.args, **self.kw) [ 1812.651125] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1812.651125] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] raise exceptions.translate_fault(task_info.error) [ 1812.651125] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1812.651125] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Faults: ['InvalidArgument'] [ 1812.651125] env[61649]: ERROR nova.compute.manager [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] [ 1812.651125] env[61649]: DEBUG nova.compute.utils [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] VimFaultException {{(pid=61649) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1812.652404] env[61649]: DEBUG nova.compute.manager [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Build of instance d6e8f17f-40c4-46e0-a900-d92d1da01ed8 was re-scheduled: A specified parameter was not correct: fileType [ 1812.652404] env[61649]: Faults: ['InvalidArgument'] {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1812.652776] env[61649]: DEBUG nova.compute.manager [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Unplugging VIFs for instance {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1812.652952] env[61649]: DEBUG nova.compute.manager [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1812.653118] env[61649]: DEBUG nova.compute.manager [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1812.653276] env[61649]: DEBUG nova.network.neutron [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1812.938832] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Expecting reply to msg 034662d474e54a5498df2ddcad47ffa0 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1812.950778] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 034662d474e54a5498df2ddcad47ffa0 [ 1812.951372] env[61649]: DEBUG nova.network.neutron [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1812.951882] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Expecting reply to msg ed08732b86fb47d385936f62eec1a960 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1812.962784] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ed08732b86fb47d385936f62eec1a960 [ 1812.963362] env[61649]: INFO nova.compute.manager [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Took 0.31 seconds to deallocate network for instance. 
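The "Acquiring lock ... acquired ... waited Ns" / "released ... held Ns" triples around "compute_resources" in the records above are emitted by oslo.concurrency each time a decorated method takes and drops the named lock. A minimal sketch of the producing pattern; the function name is a hypothetical stand-in for the resource tracker methods in this log:

    from oslo_concurrency import lockutils

    # synchronized() serializes callers on the named lock and logs the
    # "acquired ... waited Ns" and "released ... held Ns" DEBUG records
    # seen throughout this log.
    @lockutils.synchronized('compute_resources')
    def abort_claim():  # hypothetical stand-in
        pass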
[ 1812.965348] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Expecting reply to msg 4224583589d7414c840523a3ee1dc15b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1812.994398] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4224583589d7414c840523a3ee1dc15b [ 1812.996917] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Expecting reply to msg 35618fec455244399bff36e6dabbe836 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1813.025500] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 35618fec455244399bff36e6dabbe836 [ 1813.045882] env[61649]: INFO nova.scheduler.client.report [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Deleted allocations for instance d6e8f17f-40c4-46e0-a900-d92d1da01ed8 [ 1813.051728] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Expecting reply to msg eebf59392d444ef598c654bef60841b5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1813.061478] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eebf59392d444ef598c654bef60841b5 [ 1813.061949] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c05bf792-523a-4bcb-aa11-249179b813d3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Lock "d6e8f17f-40c4-46e0-a900-d92d1da01ed8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 603.324s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1813.062466] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg dc257e35704e4b3f80903821ebf4f26f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1813.063132] env[61649]: DEBUG oslo_concurrency.lockutils [None req-20fe40a7-c967-4497-ac62-7cb842ad6dd3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Lock "d6e8f17f-40c4-46e0-a900-d92d1da01ed8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 407.307s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1813.063354] env[61649]: DEBUG oslo_concurrency.lockutils [None req-20fe40a7-c967-4497-ac62-7cb842ad6dd3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Acquiring lock "d6e8f17f-40c4-46e0-a900-d92d1da01ed8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1813.063560] env[61649]: DEBUG oslo_concurrency.lockutils [None req-20fe40a7-c967-4497-ac62-7cb842ad6dd3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] 
Lock "d6e8f17f-40c4-46e0-a900-d92d1da01ed8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1813.063728] env[61649]: DEBUG oslo_concurrency.lockutils [None req-20fe40a7-c967-4497-ac62-7cb842ad6dd3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Lock "d6e8f17f-40c4-46e0-a900-d92d1da01ed8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1813.065572] env[61649]: INFO nova.compute.manager [None req-20fe40a7-c967-4497-ac62-7cb842ad6dd3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Terminating instance [ 1813.067360] env[61649]: DEBUG nova.compute.manager [None req-20fe40a7-c967-4497-ac62-7cb842ad6dd3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1813.067568] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-20fe40a7-c967-4497-ac62-7cb842ad6dd3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1813.067828] env[61649]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fbe6cbd8-fb97-4d5f-84f9-896698eba87d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.078123] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e672ba4c-c39a-4224-9a0e-d18265e08603 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.088405] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dc257e35704e4b3f80903821ebf4f26f [ 1813.088871] env[61649]: DEBUG nova.compute.manager [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1813.090486] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 8816731764964796a68346b357acc53e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1813.108552] env[61649]: WARNING nova.virt.vmwareapi.vmops [None req-20fe40a7-c967-4497-ac62-7cb842ad6dd3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d6e8f17f-40c4-46e0-a900-d92d1da01ed8 could not be found. 
[ 1813.108792] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-20fe40a7-c967-4497-ac62-7cb842ad6dd3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1813.108921] env[61649]: INFO nova.compute.manager [None req-20fe40a7-c967-4497-ac62-7cb842ad6dd3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1813.109191] env[61649]: DEBUG oslo.service.loopingcall [None req-20fe40a7-c967-4497-ac62-7cb842ad6dd3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1813.109420] env[61649]: DEBUG nova.compute.manager [-] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1813.109514] env[61649]: DEBUG nova.network.neutron [-] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1813.125874] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8816731764964796a68346b357acc53e [ 1813.129044] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 6804696839744f1aa0448a9c2c4cb2aa in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1813.134552] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6804696839744f1aa0448a9c2c4cb2aa [ 1813.134878] env[61649]: DEBUG nova.network.neutron [-] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1813.135222] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 06d83c3660394fc9927f8df290bb48b0 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1813.142124] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 06d83c3660394fc9927f8df290bb48b0 [ 1813.142837] env[61649]: INFO nova.compute.manager [-] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] Took 0.03 seconds to deallocate network for instance. 
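
The "Waiting for function ... _deallocate_network_with_retries to return" line is oslo.service's looping-call helper. A small, self-contained example of that retry pattern (the retry counts here are illustrative, not Nova's):

    from oslo_service import loopingcall

    attempts = {'n': 0}

    def _deallocate_with_retries():
        # Invoked once per interval until it raises LoopingCallDone;
        # returning normally means "not done yet, call me again".
        attempts['n'] += 1
        if attempts['n'] < 3:          # pretend the first two tries fail
            return
        raise loopingcall.LoopingCallDone(retvalue='deallocated')

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
    print(timer.start(interval=0.1).wait())   # blocks until LoopingCallDone -> 'deallocated'
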
[ 1813.143766] env[61649]: DEBUG oslo_concurrency.lockutils [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1813.143990] env[61649]: DEBUG oslo_concurrency.lockutils [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1813.145387] env[61649]: INFO nova.compute.claims [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1813.147010] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg b8dd229d4e854bdabb03ad936702d297 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1813.151245] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-20fe40a7-c967-4497-ac62-7cb842ad6dd3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Expecting reply to msg 43d9b86327d44da78f58de3fdf99a06f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1813.176858] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 43d9b86327d44da78f58de3fdf99a06f [ 1813.180503] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b8dd229d4e854bdabb03ad936702d297 [ 1813.182268] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 7732048dd4b4410e8dadcc0c101badea in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1813.189508] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7732048dd4b4410e8dadcc0c101badea [ 1813.193786] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-20fe40a7-c967-4497-ac62-7cb842ad6dd3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Expecting reply to msg 6bc2d09ed785448a8ad2d62b24b94029 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1813.230888] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6bc2d09ed785448a8ad2d62b24b94029 [ 1813.233722] env[61649]: DEBUG oslo_concurrency.lockutils [None req-20fe40a7-c967-4497-ac62-7cb842ad6dd3 tempest-ServerAddressesTestJSON-785079150 tempest-ServerAddressesTestJSON-785079150-project-member] Lock "d6e8f17f-40c4-46e0-a900-d92d1da01ed8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.171s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1813.234048] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-20fe40a7-c967-4497-ac62-7cb842ad6dd3 tempest-ServerAddressesTestJSON-785079150 
tempest-ServerAddressesTestJSON-785079150-project-member] Expecting reply to msg c5547a5aeb664b7196eeecdf0a28a4b0 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1813.234710] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "d6e8f17f-40c4-46e0-a900-d92d1da01ed8" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 75.215s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1813.234899] env[61649]: INFO nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: d6e8f17f-40c4-46e0-a900-d92d1da01ed8] During sync_power_state the instance has a pending task (deleting). Skip. [ 1813.235068] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "d6e8f17f-40c4-46e0-a900-d92d1da01ed8" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1813.244498] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c5547a5aeb664b7196eeecdf0a28a4b0 [ 1813.334733] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef314b3c-3d9e-43df-b20a-43be5650f206 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.343341] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-639a4581-70db-41bd-9684-c680fc4944fc {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.374735] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cd31682-8a65-4bb0-914d-cb8e81657a26 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.381743] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31c48e60-0231-47c2-bb30-daf3d1966e9e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.394130] env[61649]: DEBUG nova.compute.provider_tree [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1813.394597] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg f84e6672f75444d696e3754ff5c54218 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1813.402017] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f84e6672f75444d696e3754ff5c54218 [ 1813.402940] env[61649]: DEBUG nova.scheduler.client.report [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 
0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1813.405170] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg c638b9259d8142a3a2569f9e7bd211ce in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1813.418007] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c638b9259d8142a3a2569f9e7bd211ce [ 1813.418740] env[61649]: DEBUG oslo_concurrency.lockutils [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.275s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1813.419200] env[61649]: DEBUG nova.compute.manager [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Start building networks asynchronously for instance. {{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1813.420899] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg db2662d71a3e4d5c87921e9df09539a2 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1813.448149] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg db2662d71a3e4d5c87921e9df09539a2 [ 1813.449674] env[61649]: DEBUG nova.compute.utils [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Using /dev/sd instead of None {{(pid=61649) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1813.450284] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg e9470635dc9942e9945949ed6f129346 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1813.451309] env[61649]: DEBUG nova.compute.manager [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Allocating IP information in the background. 
{{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1813.451487] env[61649]: DEBUG nova.network.neutron [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] allocate_for_instance() {{(pid=61649) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1813.459505] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e9470635dc9942e9945949ed6f129346 [ 1813.460068] env[61649]: DEBUG nova.compute.manager [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Start building block device mappings for instance. {{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1813.461693] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg b0d783a20f604a25bf4ad9117d097ca6 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1813.492907] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b0d783a20f604a25bf4ad9117d097ca6 [ 1813.495477] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 8923da9c5ace40c39cee25827c25ab23 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1813.523400] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8923da9c5ace40c39cee25827c25ab23 [ 1813.524695] env[61649]: DEBUG nova.compute.manager [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Start spawning the instance on the hypervisor. 
{{(pid=61649) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1813.530392] env[61649]: DEBUG nova.policy [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fc5f71ebe35b4863a38dd7606ae87937', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '72501ae7a7dd4f85801c096912a5af36', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61649) authorize /opt/stack/nova/nova/policy.py:203}} [ 1813.546370] env[61649]: DEBUG nova.virt.hardware [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-04-02T10:03:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-04-02T10:03:42Z,direct_url=,disk_format='vmdk',id=d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d9c1bd4c77004c3cb8e42232cad1896c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-04-02T10:03:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1813.546639] env[61649]: DEBUG nova.virt.hardware [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Flavor limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1813.546849] env[61649]: DEBUG nova.virt.hardware [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Image limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1813.547068] env[61649]: DEBUG nova.virt.hardware [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Flavor pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1813.547246] env[61649]: DEBUG nova.virt.hardware [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Image pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1813.547428] env[61649]: DEBUG nova.virt.hardware [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1813.547769] env[61649]: DEBUG nova.virt.hardware [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 
tempest-ServersTestJSON-1529792186-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1813.547970] env[61649]: DEBUG nova.virt.hardware [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1813.548190] env[61649]: DEBUG nova.virt.hardware [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Got 1 possible topologies {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1813.548385] env[61649]: DEBUG nova.virt.hardware [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1813.548593] env[61649]: DEBUG nova.virt.hardware [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1813.549866] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5d747aa-14e2-483d-910c-b1f7919da6b4 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.558060] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06611d1a-c277-4349-8d6f-d3c6f89f2601 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.792289] env[61649]: DEBUG nova.network.neutron [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Successfully created port: 8d6576de-7402-4bdd-9bc8-3a1648f70b14 {{(pid=61649) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1814.291964] env[61649]: DEBUG nova.network.neutron [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Successfully updated port: 8d6576de-7402-4bdd-9bc8-3a1648f70b14 {{(pid=61649) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1814.291964] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 09c0edbf09bd451fbcf06638ccd338b3 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1814.294690] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 09c0edbf09bd451fbcf06638ccd338b3 [ 1814.294690] env[61649]: DEBUG oslo_concurrency.lockutils [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquiring lock 
"refresh_cache-8b1cd843-48b0-4e85-93fa-32ddd6e32883" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1814.294690] env[61649]: DEBUG oslo_concurrency.lockutils [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquired lock "refresh_cache-8b1cd843-48b0-4e85-93fa-32ddd6e32883" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1814.294690] env[61649]: DEBUG nova.network.neutron [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Building network info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1814.294690] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 7935220778e1435587e07d4557dd7f44 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1814.301392] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7935220778e1435587e07d4557dd7f44 [ 1814.357339] env[61649]: DEBUG nova.network.neutron [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Instance cache missing network info. {{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1814.550137] env[61649]: DEBUG nova.network.neutron [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Updating instance_info_cache with network_info: [{"id": "8d6576de-7402-4bdd-9bc8-3a1648f70b14", "address": "fa:16:3e:4a:fc:c2", "network": {"id": "8c8e2f95-e820-404a-a1c5-63f49f27fa23", "bridge": "br-int", "label": "tempest-ServersTestJSON-1518117450-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72501ae7a7dd4f85801c096912a5af36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "158692b5-b9fb-49e8-9903-e742ffd6c168", "external-id": "nsx-vlan-transportzone-769", "segmentation_id": 769, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d6576de-74", "ovs_interfaceid": "8d6576de-7402-4bdd-9bc8-3a1648f70b14", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1814.550643] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 21ffd4f7ad154edda3f31b6913abf6c5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1814.562985] 
env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 21ffd4f7ad154edda3f31b6913abf6c5 [ 1814.563570] env[61649]: DEBUG oslo_concurrency.lockutils [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Releasing lock "refresh_cache-8b1cd843-48b0-4e85-93fa-32ddd6e32883" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1814.563836] env[61649]: DEBUG nova.compute.manager [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Instance network_info: |[{"id": "8d6576de-7402-4bdd-9bc8-3a1648f70b14", "address": "fa:16:3e:4a:fc:c2", "network": {"id": "8c8e2f95-e820-404a-a1c5-63f49f27fa23", "bridge": "br-int", "label": "tempest-ServersTestJSON-1518117450-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72501ae7a7dd4f85801c096912a5af36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "158692b5-b9fb-49e8-9903-e742ffd6c168", "external-id": "nsx-vlan-transportzone-769", "segmentation_id": 769, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d6576de-74", "ovs_interfaceid": "8d6576de-7402-4bdd-9bc8-3a1648f70b14", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1814.564253] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4a:fc:c2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '158692b5-b9fb-49e8-9903-e742ffd6c168', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8d6576de-7402-4bdd-9bc8-3a1648f70b14', 'vif_model': 'vmxnet3'}] {{(pid=61649) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1814.571560] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Creating folder: Project (72501ae7a7dd4f85801c096912a5af36). Parent ref: group-v51588. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1814.572091] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4901b5bd-472f-48d8-abe1-6abf87286b72 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.583322] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Created folder: Project (72501ae7a7dd4f85801c096912a5af36) in parent group-v51588. 
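
For reference, the inventory record logged above for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 implies the following effective capacities, assuming Placement's usual rule capacity = (total - reserved) * allocation_ratio:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0

So the 4.0 CPU allocation ratio is what lets this 48-thread host claim 192 schedulable vCPUs, while memory and disk are not overcommitted.
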
[ 1814.583499] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Creating folder: Instances. Parent ref: group-v51691. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1814.583714] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3368eff4-388c-4be6-b98f-feb5f29d8091 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.592493] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Created folder: Instances in parent group-v51691. [ 1814.592709] env[61649]: DEBUG oslo.service.loopingcall [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1814.592883] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Creating VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1814.593067] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cd0b55a7-e1f6-44aa-93c2-40b3995bea4e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.614175] env[61649]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1814.614175] env[61649]: value = "task-158279" [ 1814.614175] env[61649]: _type = "Task" [ 1814.614175] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1814.621419] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158279, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.991484] env[61649]: DEBUG nova.compute.manager [req-6ad48ef0-94ec-4419-a796-7bb89cea20d8 req-ff101c59-a1a5-4dfd-8260-1fcb4c3a39cc service nova] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Received event network-vif-plugged-8d6576de-7402-4bdd-9bc8-3a1648f70b14 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1814.991738] env[61649]: DEBUG oslo_concurrency.lockutils [req-6ad48ef0-94ec-4419-a796-7bb89cea20d8 req-ff101c59-a1a5-4dfd-8260-1fcb4c3a39cc service nova] Acquiring lock "8b1cd843-48b0-4e85-93fa-32ddd6e32883-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1814.991977] env[61649]: DEBUG oslo_concurrency.lockutils [req-6ad48ef0-94ec-4419-a796-7bb89cea20d8 req-ff101c59-a1a5-4dfd-8260-1fcb4c3a39cc service nova] Lock "8b1cd843-48b0-4e85-93fa-32ddd6e32883-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1814.992198] env[61649]: DEBUG oslo_concurrency.lockutils [req-6ad48ef0-94ec-4419-a796-7bb89cea20d8 req-ff101c59-a1a5-4dfd-8260-1fcb4c3a39cc service nova] Lock "8b1cd843-48b0-4e85-93fa-32ddd6e32883-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1814.992370] env[61649]: DEBUG nova.compute.manager [req-6ad48ef0-94ec-4419-a796-7bb89cea20d8 req-ff101c59-a1a5-4dfd-8260-1fcb4c3a39cc service nova] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] No waiting events found dispatching network-vif-plugged-8d6576de-7402-4bdd-9bc8-3a1648f70b14 {{(pid=61649) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1814.992535] env[61649]: WARNING nova.compute.manager [req-6ad48ef0-94ec-4419-a796-7bb89cea20d8 req-ff101c59-a1a5-4dfd-8260-1fcb4c3a39cc service nova] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Received unexpected event network-vif-plugged-8d6576de-7402-4bdd-9bc8-3a1648f70b14 for instance with vm_state building and task_state spawning. [ 1814.992694] env[61649]: DEBUG nova.compute.manager [req-6ad48ef0-94ec-4419-a796-7bb89cea20d8 req-ff101c59-a1a5-4dfd-8260-1fcb4c3a39cc service nova] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Received event network-changed-8d6576de-7402-4bdd-9bc8-3a1648f70b14 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1814.992850] env[61649]: DEBUG nova.compute.manager [req-6ad48ef0-94ec-4419-a796-7bb89cea20d8 req-ff101c59-a1a5-4dfd-8260-1fcb4c3a39cc service nova] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Refreshing instance network info cache due to event network-changed-8d6576de-7402-4bdd-9bc8-3a1648f70b14. 
{{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 1814.993031] env[61649]: DEBUG oslo_concurrency.lockutils [req-6ad48ef0-94ec-4419-a796-7bb89cea20d8 req-ff101c59-a1a5-4dfd-8260-1fcb4c3a39cc service nova] Acquiring lock "refresh_cache-8b1cd843-48b0-4e85-93fa-32ddd6e32883" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1814.993168] env[61649]: DEBUG oslo_concurrency.lockutils [req-6ad48ef0-94ec-4419-a796-7bb89cea20d8 req-ff101c59-a1a5-4dfd-8260-1fcb4c3a39cc service nova] Acquired lock "refresh_cache-8b1cd843-48b0-4e85-93fa-32ddd6e32883" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1814.993323] env[61649]: DEBUG nova.network.neutron [req-6ad48ef0-94ec-4419-a796-7bb89cea20d8 req-ff101c59-a1a5-4dfd-8260-1fcb4c3a39cc service nova] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Refreshing network info cache for port 8d6576de-7402-4bdd-9bc8-3a1648f70b14 {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 1814.993792] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-6ad48ef0-94ec-4419-a796-7bb89cea20d8 req-ff101c59-a1a5-4dfd-8260-1fcb4c3a39cc service nova] Expecting reply to msg bda108be5d5140ddb0ebec463f19e86e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1815.001374] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bda108be5d5140ddb0ebec463f19e86e [ 1815.124184] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158279, 'name': CreateVM_Task, 'duration_secs': 0.280898} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1815.126375] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Created VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1815.127460] env[61649]: DEBUG oslo_concurrency.lockutils [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1815.127639] env[61649]: DEBUG oslo_concurrency.lockutils [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1815.127969] env[61649]: DEBUG oslo_concurrency.lockutils [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1815.128493] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-742dffc7-0c17-4306-afbc-ea1656107648 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.132997] env[61649]: DEBUG oslo_vmware.api [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 
tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Waiting for the task: (returnval){ [ 1815.132997] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52a4cd96-e3a9-60f8-0c04-19e3a646c62b" [ 1815.132997] env[61649]: _type = "Task" [ 1815.132997] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1815.140785] env[61649]: DEBUG oslo_vmware.api [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52a4cd96-e3a9-60f8-0c04-19e3a646c62b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.216746] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Acquiring lock "f0e69971-df47-4ef0-85c9-ac686e4a4f9d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1815.216986] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Lock "f0e69971-df47-4ef0-85c9-ac686e4a4f9d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1815.222743] env[61649]: DEBUG nova.network.neutron [req-6ad48ef0-94ec-4419-a796-7bb89cea20d8 req-ff101c59-a1a5-4dfd-8260-1fcb4c3a39cc service nova] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Updated VIF entry in instance network info cache for port 8d6576de-7402-4bdd-9bc8-3a1648f70b14. 
{{(pid=61649) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 1815.223084] env[61649]: DEBUG nova.network.neutron [req-6ad48ef0-94ec-4419-a796-7bb89cea20d8 req-ff101c59-a1a5-4dfd-8260-1fcb4c3a39cc service nova] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Updating instance_info_cache with network_info: [{"id": "8d6576de-7402-4bdd-9bc8-3a1648f70b14", "address": "fa:16:3e:4a:fc:c2", "network": {"id": "8c8e2f95-e820-404a-a1c5-63f49f27fa23", "bridge": "br-int", "label": "tempest-ServersTestJSON-1518117450-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72501ae7a7dd4f85801c096912a5af36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "158692b5-b9fb-49e8-9903-e742ffd6c168", "external-id": "nsx-vlan-transportzone-769", "segmentation_id": 769, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d6576de-74", "ovs_interfaceid": "8d6576de-7402-4bdd-9bc8-3a1648f70b14", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1815.223550] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-6ad48ef0-94ec-4419-a796-7bb89cea20d8 req-ff101c59-a1a5-4dfd-8260-1fcb4c3a39cc service nova] Expecting reply to msg f890247a0e694edcadcee2ef923be3d5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1815.231397] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f890247a0e694edcadcee2ef923be3d5 [ 1815.231904] env[61649]: DEBUG oslo_concurrency.lockutils [req-6ad48ef0-94ec-4419-a796-7bb89cea20d8 req-ff101c59-a1a5-4dfd-8260-1fcb4c3a39cc service nova] Releasing lock "refresh_cache-8b1cd843-48b0-4e85-93fa-32ddd6e32883" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1815.642867] env[61649]: DEBUG oslo_concurrency.lockutils [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1815.643231] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Processing image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1815.643553] env[61649]: DEBUG oslo_concurrency.lockutils [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 
1824.594892] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-a7ff2140-6bd3-4490-b467-6874b1541880 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg e5f5ad705f5a4104ba990c6c3da48d42 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1824.605450] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e5f5ad705f5a4104ba990c6c3da48d42 [ 1824.605706] env[61649]: DEBUG oslo_concurrency.lockutils [None req-a7ff2140-6bd3-4490-b467-6874b1541880 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Acquiring lock "4e47e82d-780e-4c23-8071-083beab2a53f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1850.986601] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 0e35619f24274944972ec1c6a4619bed in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1850.995881] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0e35619f24274944972ec1c6a4619bed [ 1857.382560] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1857.929448] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1857.929643] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61649) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 1858.929935] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1860.247311] env[61649]: WARNING oslo_vmware.rw_handles [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1860.247311] env[61649]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1860.247311] env[61649]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1860.247311] env[61649]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1860.247311] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1860.247311] env[61649]: ERROR oslo_vmware.rw_handles response.begin() [ 1860.247311] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1860.247311] env[61649]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1860.247311] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1860.247311] env[61649]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1860.247311] env[61649]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1860.247311] env[61649]: ERROR oslo_vmware.rw_handles [ 1860.248354] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] [instance: b6243867-9546-4663-9d48-5c040537490b] Downloaded image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to vmware_temp/7f7d9600-68a0-4169-8f23-e9e164d8c277/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1860.250139] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] [instance: b6243867-9546-4663-9d48-5c040537490b] Caching image {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1860.250370] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Copying Virtual Disk [datastore1] vmware_temp/7f7d9600-68a0-4169-8f23-e9e164d8c277/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk to [datastore1] vmware_temp/7f7d9600-68a0-4169-8f23-e9e164d8c277/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk {{(pid=61649) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1860.250646] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-95a72faa-3a01-410a-a2b4-c8b3910d8089 {{(pid=61649) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.257637] env[61649]: DEBUG oslo_vmware.api [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Waiting for the task: (returnval){ [ 1860.257637] env[61649]: value = "task-158280" [ 1860.257637] env[61649]: _type = "Task" [ 1860.257637] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1860.265236] env[61649]: DEBUG oslo_vmware.api [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Task: {'id': task-158280, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1860.768448] env[61649]: DEBUG oslo_vmware.exceptions [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Fault InvalidArgument not matched. {{(pid=61649) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1860.768448] env[61649]: DEBUG oslo_concurrency.lockutils [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1860.768733] env[61649]: ERROR nova.compute.manager [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] [instance: b6243867-9546-4663-9d48-5c040537490b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1860.768733] env[61649]: Faults: ['InvalidArgument'] [ 1860.768733] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] Traceback (most recent call last): [ 1860.768733] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1860.768733] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] yield resources [ 1860.768733] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1860.768733] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] self.driver.spawn(context, instance, image_meta, [ 1860.768733] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1860.768733] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1860.768733] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1860.768733] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] self._fetch_image_if_missing(context, 
vi) [ 1860.768733] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1860.769177] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] image_cache(vi, tmp_image_ds_loc) [ 1860.769177] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1860.769177] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] vm_util.copy_virtual_disk( [ 1860.769177] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1860.769177] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] session._wait_for_task(vmdk_copy_task) [ 1860.769177] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1860.769177] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] return self.wait_for_task(task_ref) [ 1860.769177] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1860.769177] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] return evt.wait() [ 1860.769177] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1860.769177] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] result = hub.switch() [ 1860.769177] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1860.769177] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] return self.greenlet.switch() [ 1860.769659] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1860.769659] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] self.f(*self.args, **self.kw) [ 1860.769659] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1860.769659] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] raise exceptions.translate_fault(task_info.error) [ 1860.769659] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1860.769659] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] Faults: ['InvalidArgument'] [ 1860.769659] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] [ 1860.769659] env[61649]: INFO nova.compute.manager [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 
tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] [instance: b6243867-9546-4663-9d48-5c040537490b] Terminating instance [ 1860.770666] env[61649]: DEBUG oslo_concurrency.lockutils [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1860.770868] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1860.771118] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-07637df7-dbcd-43fa-830e-a33f1bac3aaf {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.773443] env[61649]: DEBUG nova.compute.manager [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] [instance: b6243867-9546-4663-9d48-5c040537490b] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1860.773639] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] [instance: b6243867-9546-4663-9d48-5c040537490b] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1860.774335] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c032d225-57ee-48b3-b176-a9656714c035 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.780862] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] [instance: b6243867-9546-4663-9d48-5c040537490b] Unregistering the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1860.781069] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4eaaa6e4-91dc-49fd-be8b-42755943cfb2 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.783159] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1860.783327] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61649) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1860.784251] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5345934d-1374-4d36-bf17-fa84c22944b4 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.789108] env[61649]: DEBUG oslo_vmware.api [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Waiting for the task: (returnval){ [ 1860.789108] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]5250ae2b-985c-767f-abc9-7c828e44e3b5" [ 1860.789108] env[61649]: _type = "Task" [ 1860.789108] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1860.803499] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Preparing fetch location {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1860.803728] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Creating directory with path [datastore1] vmware_temp/5ee811f1-e831-4220-b7d8-7a3b07c0bff1/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1860.803931] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3c2e2d15-e3c7-4e10-8f97-632cfc86ed59 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.825490] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Created directory with path [datastore1] vmware_temp/5ee811f1-e831-4220-b7d8-7a3b07c0bff1/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1860.825706] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Fetch image to [datastore1] vmware_temp/5ee811f1-e831-4220-b7d8-7a3b07c0bff1/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1860.825877] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to [datastore1] vmware_temp/5ee811f1-e831-4220-b7d8-7a3b07c0bff1/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1860.826627] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa22a398-df33-4605-ac2f-b1a87bcebf4d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
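The entries above trace Nova's image-cache flow on the VMware datastore: a worker takes a lock on the cached VMDK path, creates devstack-image-cache_base if missing, downloads the image into a per-request vmware_temp/<uuid>/ directory, and only then publishes it into the shared cache. Below is a minimal sketch of that download-once, publish-under-lock pattern, assuming hypothetical exists/fetch_fn/copy_fn helpers (the real logic is _fetch_image_if_missing in nova/virt/vmwareapi/vmops.py); only the lockutils call is the real oslo.concurrency API.

    import uuid

    from oslo_concurrency import lockutils

    def fetch_image_if_missing(image_id, cache_dir, exists, fetch_fn, copy_fn):
        """Fetch an image into a shared datastore cache exactly once.

        exists/fetch_fn/copy_fn are hypothetical stand-ins for the
        datastore-browser check, the HTTP image download and the
        CopyVirtualDisk_Task step seen in the log.
        """
        cached = '%s/%s/%s.vmdk' % (cache_dir, image_id, image_id)
        # Serialize builds racing for the same image, matching the
        # Acquired/Releasing lock "[datastore1] devstack-image-cache_base/..."
        # messages above.
        with lockutils.lock(cached):
            if exists(cached):
                return cached                 # cache hit: nothing to fetch
            # Download into a private vmware_temp/<uuid>/ directory so a
            # half-written file can never alias the published cache entry.
            tmp = 'vmware_temp/%s/%s/tmp-sparse.vmdk' % (uuid.uuid4(), image_id)
            fetch_fn(tmp)
            copy_fn(tmp, cached)
        return cached

The per-request temp directory is what makes the concurrent builds in this log (req-afc0fdbc... and req-98c20f81...) safe: each fetches into its own vmware_temp path even when they target the same image ID.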
[ 1860.833186] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-899b5663-2db1-4762-aa99-8b797c444d79 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.843431] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-086f3346-0156-40fd-aa33-9113209801db {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.848355] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] [instance: b6243867-9546-4663-9d48-5c040537490b] Unregistered the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1860.848548] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] [instance: b6243867-9546-4663-9d48-5c040537490b] Deleting contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1860.848723] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Deleting the datastore file [datastore1] b6243867-9546-4663-9d48-5c040537490b {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1860.849253] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a77c7e22-c8cc-4cc9-b9a3-dc23b0482a28 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.876807] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b0f0e3d-a021-4598-9206-7cf77404847a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.879125] env[61649]: DEBUG oslo_vmware.api [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Waiting for the task: (returnval){ [ 1860.879125] env[61649]: value = "task-158282" [ 1860.879125] env[61649]: _type = "Task" [ 1860.879125] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1860.883856] env[61649]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-f21e8270-5fc1-49bb-9b62-963bcb60fb4d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.887789] env[61649]: DEBUG oslo_vmware.api [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Task: {'id': task-158282, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1860.904271] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1860.950677] env[61649]: DEBUG oslo_vmware.rw_handles [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5ee811f1-e831-4220-b7d8-7a3b07c0bff1/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1861.009595] env[61649]: DEBUG oslo_vmware.rw_handles [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Completed reading data from the image iterator. {{(pid=61649) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1861.009806] env[61649]: DEBUG oslo_vmware.rw_handles [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5ee811f1-e831-4220-b7d8-7a3b07c0bff1/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1861.389842] env[61649]: DEBUG oslo_vmware.api [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Task: {'id': task-158282, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.071612} completed successfully. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1861.390222] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Deleted the datastore file {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1861.390354] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] [instance: b6243867-9546-4663-9d48-5c040537490b] Deleted contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1861.390531] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] [instance: b6243867-9546-4663-9d48-5c040537490b] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1861.390700] env[61649]: INFO nova.compute.manager [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] [instance: b6243867-9546-4663-9d48-5c040537490b] Took 0.62 seconds to destroy the instance on the hypervisor. [ 1861.392807] env[61649]: DEBUG nova.compute.claims [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] [instance: b6243867-9546-4663-9d48-5c040537490b] Aborting claim: {{(pid=61649) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1861.392988] env[61649]: DEBUG oslo_concurrency.lockutils [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1861.393196] env[61649]: DEBUG oslo_concurrency.lockutils [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1861.395179] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Expecting reply to msg d64c7b1e4b264198af923573db8bb256 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1861.426795] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d64c7b1e4b264198af923573db8bb256 [ 1861.567122] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7d8f30c-c8c8-4277-83a9-f2813580f959 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.574478] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07bb304c-d98e-4472-96c9-de559388b5b8 {{(pid=61649) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.602920] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c04e6fdd-8d26-4e52-8775-4dcfd402dace {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.609981] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-723ea284-80fb-433f-a99f-dc7d5d558118 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.623465] env[61649]: DEBUG nova.compute.provider_tree [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1861.624712] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Expecting reply to msg ab002e52a5534b03a421e21cf7d19545 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1861.631645] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ab002e52a5534b03a421e21cf7d19545 [ 1861.632562] env[61649]: DEBUG nova.scheduler.client.report [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1861.634797] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Expecting reply to msg e5b833b0568345a19026cfe911c6fbb7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1861.645637] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e5b833b0568345a19026cfe911c6fbb7 [ 1861.646264] env[61649]: DEBUG oslo_concurrency.lockutils [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.253s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1861.646777] env[61649]: ERROR nova.compute.manager [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] [instance: b6243867-9546-4663-9d48-5c040537490b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1861.646777] env[61649]: Faults: ['InvalidArgument'] [ 1861.646777] env[61649]: ERROR 
nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] Traceback (most recent call last): [ 1861.646777] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1861.646777] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] self.driver.spawn(context, instance, image_meta, [ 1861.646777] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1861.646777] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1861.646777] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1861.646777] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] self._fetch_image_if_missing(context, vi) [ 1861.646777] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1861.646777] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] image_cache(vi, tmp_image_ds_loc) [ 1861.646777] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1861.647339] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] vm_util.copy_virtual_disk( [ 1861.647339] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1861.647339] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] session._wait_for_task(vmdk_copy_task) [ 1861.647339] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1861.647339] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] return self.wait_for_task(task_ref) [ 1861.647339] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1861.647339] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] return evt.wait() [ 1861.647339] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1861.647339] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] result = hub.switch() [ 1861.647339] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1861.647339] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] return self.greenlet.switch() [ 1861.647339] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1861.647339] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] self.f(*self.args, **self.kw) [ 1861.647929] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1861.647929] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] raise exceptions.translate_fault(task_info.error) [ 1861.647929] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1861.647929] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] Faults: ['InvalidArgument'] [ 1861.647929] env[61649]: ERROR nova.compute.manager [instance: b6243867-9546-4663-9d48-5c040537490b] [ 1861.647929] env[61649]: DEBUG nova.compute.utils [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] [instance: b6243867-9546-4663-9d48-5c040537490b] VimFaultException {{(pid=61649) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1861.648796] env[61649]: DEBUG nova.compute.manager [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] [instance: b6243867-9546-4663-9d48-5c040537490b] Build of instance b6243867-9546-4663-9d48-5c040537490b was re-scheduled: A specified parameter was not correct: fileType [ 1861.648796] env[61649]: Faults: ['InvalidArgument'] {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1861.649171] env[61649]: DEBUG nova.compute.manager [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] [instance: b6243867-9546-4663-9d48-5c040537490b] Unplugging VIFs for instance {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1861.649341] env[61649]: DEBUG nova.compute.manager [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1861.649508] env[61649]: DEBUG nova.compute.manager [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] [instance: b6243867-9546-4663-9d48-5c040537490b] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1861.649692] env[61649]: DEBUG nova.network.neutron [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] [instance: b6243867-9546-4663-9d48-5c040537490b] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1861.922545] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Expecting reply to msg 416b8d2cf13c4ecc93c84a81657facef in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1861.923580] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1861.928373] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1861.929734] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 416b8d2cf13c4ecc93c84a81657facef [ 1861.930242] env[61649]: DEBUG nova.network.neutron [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] [instance: b6243867-9546-4663-9d48-5c040537490b] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1861.930729] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Expecting reply to msg 6ae2e56857bc4fd28d9bf3310244f352 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1861.941364] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6ae2e56857bc4fd28d9bf3310244f352 [ 1861.942210] env[61649]: INFO nova.compute.manager [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] [instance: b6243867-9546-4663-9d48-5c040537490b] Took 0.29 seconds to deallocate network for instance. 
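The traceback repeated above shows the failure half of the same flow: CopyVirtualDisk_Task is polled by oslo.vmware's wait_for_task loop, and a task error is translated into VimFaultException ("A specified parameter was not correct: fileType", Faults: ['InvalidArgument']), after which Nova aborts the resource claim and re-schedules the build. A minimal sketch of that invoke-then-wait pattern, assuming an already established VMwareAPISession and pre-resolved managed object references; invoke_api, wait_for_task and VimFaultException are the real oslo.vmware API.

    from oslo_vmware import exceptions as vmware_exc

    def copy_virtual_disk(session, disk_mgr_ref, dc_ref, src, dst, spec=None):
        # Kick off the server-side copy; returns a Task managed object.
        task = session.invoke_api(
            session.vim, 'CopyVirtualDisk_Task', disk_mgr_ref,
            sourceName=src, sourceDatacenter=dc_ref,
            destName=dst, destDatacenter=dc_ref, destSpec=spec)
        try:
            # Blocks while _poll_task reports progress (the "progress is 0%"
            # lines above); on task error it raises the translated fault
            # class, falling back to VimFaultException when no specific
            # class matches ("Fault InvalidArgument not matched").
            return session.wait_for_task(task)
        except vmware_exc.VimFaultException as e:
            # e.fault_list carries the fault names, e.g. ['InvalidArgument'];
            # the caller aborts the compute_resources claim and re-schedules
            # the build, as the "was re-scheduled" entry records.
            print('disk copy failed: %s' % e.fault_list)
            raise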
[ 1861.943994] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Expecting reply to msg dc892d2d8a0249c9ade68777c5790dfd in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1861.975623] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dc892d2d8a0249c9ade68777c5790dfd [ 1861.978435] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Expecting reply to msg 4eb1a793048b45f29f950c550c0cd817 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1862.007217] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4eb1a793048b45f29f950c550c0cd817 [ 1862.025154] env[61649]: INFO nova.scheduler.client.report [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Deleted allocations for instance b6243867-9546-4663-9d48-5c040537490b [ 1862.031818] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Expecting reply to msg 8d41a9422b384443a69f874b8ce23b7b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1862.049676] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8d41a9422b384443a69f874b8ce23b7b [ 1862.050185] env[61649]: DEBUG oslo_concurrency.lockutils [None req-afc0fdbc-281b-4e43-8622-5765a20c0296 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Lock "b6243867-9546-4663-9d48-5c040537490b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 633.044s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1862.050728] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg b3137fefc47b4fd0873d39cd2fe99158 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1862.051426] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9227507e-cf44-47c2-831d-9ef1a3f29e02 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Lock "b6243867-9546-4663-9d48-5c040537490b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 436.882s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1862.051644] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9227507e-cf44-47c2-831d-9ef1a3f29e02 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Acquiring lock "b6243867-9546-4663-9d48-5c040537490b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1862.051848] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9227507e-cf44-47c2-831d-9ef1a3f29e02 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Lock 
"b6243867-9546-4663-9d48-5c040537490b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1862.052015] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9227507e-cf44-47c2-831d-9ef1a3f29e02 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Lock "b6243867-9546-4663-9d48-5c040537490b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1862.053891] env[61649]: INFO nova.compute.manager [None req-9227507e-cf44-47c2-831d-9ef1a3f29e02 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] [instance: b6243867-9546-4663-9d48-5c040537490b] Terminating instance [ 1862.055464] env[61649]: DEBUG nova.compute.manager [None req-9227507e-cf44-47c2-831d-9ef1a3f29e02 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] [instance: b6243867-9546-4663-9d48-5c040537490b] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1862.055655] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-9227507e-cf44-47c2-831d-9ef1a3f29e02 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] [instance: b6243867-9546-4663-9d48-5c040537490b] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1862.056124] env[61649]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-37f1dba9-7344-48dc-8544-ef7aaf6d6a9c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.065379] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d5c7278-7907-4bdc-b9fc-bdb07f442696 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.075677] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b3137fefc47b4fd0873d39cd2fe99158 [ 1862.076159] env[61649]: DEBUG nova.compute.manager [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1862.077755] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 6732230cb4b54e86906c14d0f826c111 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1862.095255] env[61649]: WARNING nova.virt.vmwareapi.vmops [None req-9227507e-cf44-47c2-831d-9ef1a3f29e02 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] [instance: b6243867-9546-4663-9d48-5c040537490b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b6243867-9546-4663-9d48-5c040537490b could not be found. 
[ 1862.095604] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-9227507e-cf44-47c2-831d-9ef1a3f29e02 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] [instance: b6243867-9546-4663-9d48-5c040537490b] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1862.095914] env[61649]: INFO nova.compute.manager [None req-9227507e-cf44-47c2-831d-9ef1a3f29e02 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] [instance: b6243867-9546-4663-9d48-5c040537490b] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1862.096305] env[61649]: DEBUG oslo.service.loopingcall [None req-9227507e-cf44-47c2-831d-9ef1a3f29e02 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1862.096646] env[61649]: DEBUG nova.compute.manager [-] [instance: b6243867-9546-4663-9d48-5c040537490b] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1862.096848] env[61649]: DEBUG nova.network.neutron [-] [instance: b6243867-9546-4663-9d48-5c040537490b] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1862.107100] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6732230cb4b54e86906c14d0f826c111 [ 1862.115161] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg a842703a3f6a4c55bda260fe8fd35def in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1862.121099] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1862.121450] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1862.122951] env[61649]: INFO nova.compute.claims [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1862.124576] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg dabd8ee8248f4765b81fab5ce36df203 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1862.125745] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a842703a3f6a4c55bda260fe8fd35def [ 1862.126544] env[61649]: DEBUG nova.network.neutron [-] [instance: b6243867-9546-4663-9d48-5c040537490b] Updating 
instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1862.126959] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg f5bd6805f78946aea34828c9dd6e8eef in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1862.133953] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f5bd6805f78946aea34828c9dd6e8eef [ 1862.134549] env[61649]: INFO nova.compute.manager [-] [instance: b6243867-9546-4663-9d48-5c040537490b] Took 0.04 seconds to deallocate network for instance. [ 1862.139336] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9227507e-cf44-47c2-831d-9ef1a3f29e02 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Expecting reply to msg ca88720cc72b4dfa8649ba503f28f012 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1862.160783] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dabd8ee8248f4765b81fab5ce36df203 [ 1862.162328] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 23e86e17eea444c690510999eb9379a2 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1862.170097] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 23e86e17eea444c690510999eb9379a2 [ 1862.177705] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ca88720cc72b4dfa8649ba503f28f012 [ 1862.195617] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9227507e-cf44-47c2-831d-9ef1a3f29e02 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Expecting reply to msg ab8cdbb69e5946ab8f2230c5763a5bcf in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1862.253559] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ab8cdbb69e5946ab8f2230c5763a5bcf [ 1862.256329] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9227507e-cf44-47c2-831d-9ef1a3f29e02 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Lock "b6243867-9546-4663-9d48-5c040537490b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.205s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1862.256649] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9227507e-cf44-47c2-831d-9ef1a3f29e02 tempest-AttachVolumeTestJSON-1656492236 tempest-AttachVolumeTestJSON-1656492236-project-member] Expecting reply to msg 46c0af4b78ca41638049d244934a4353 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1862.258015] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "b6243867-9546-4663-9d48-5c040537490b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 124.238s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1862.258216] env[61649]: INFO nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: b6243867-9546-4663-9d48-5c040537490b] During sync_power_state the instance has a pending task (deleting). Skip. 
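The _sync_power_states entries just above illustrate that task's guard: the periodic sync takes the per-instance lock, but an instance with a pending task_state (here 'deleting') is skipped rather than raced against the in-flight operation. A minimal sketch of the rule, with a hypothetical query_power_state callable standing in for the hypervisor driver; task_state, power_state and save() are real nova Instance attributes.

    def sync_power_state(instance, query_power_state):
        if instance.task_state is not None:
            # "During sync_power_state the instance has a pending task
            # (deleting). Skip." -- the in-flight operation owns the state.
            return
        actual = query_power_state(instance)   # ask the hypervisor driver
        if actual != instance.power_state:
            instance.power_state = actual      # reconcile the DB with reality
            instance.save()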
[ 1862.258401] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "b6243867-9546-4663-9d48-5c040537490b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1862.267486] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 46c0af4b78ca41638049d244934a4353 [ 1862.317887] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9fb123b-b55e-4099-ad1c-57e70d71d798 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.324794] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bb02557-43c8-4285-83b8-50214fe6d739 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.354906] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75579f55-009a-4830-9683-dc77389d34da {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.361627] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7dadd5d-be5c-404c-83df-c8f05be9137d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.374089] env[61649]: DEBUG nova.compute.provider_tree [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1862.374462] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 5ce4ce17a69745539f9f8cac4704aa09 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1862.381789] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ce4ce17a69745539f9f8cac4704aa09 [ 1862.382629] env[61649]: DEBUG nova.scheduler.client.report [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1862.384792] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 6936fd41735f481fa058c23a69db7367 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1862.397124] env[61649]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6936fd41735f481fa058c23a69db7367 [ 1862.397743] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.276s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1862.398187] env[61649]: DEBUG nova.compute.manager [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Start building networks asynchronously for instance. {{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1862.399931] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 3f39c6ca8baf47c9838f75a1e05125be in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1862.426298] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3f39c6ca8baf47c9838f75a1e05125be [ 1862.427712] env[61649]: DEBUG nova.compute.utils [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Using /dev/sd instead of None {{(pid=61649) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1862.428301] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg f493503f4c494ab482ccaf45fc548e1e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1862.429030] env[61649]: DEBUG nova.compute.manager [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Allocating IP information in the background. {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1862.429202] env[61649]: DEBUG nova.network.neutron [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] allocate_for_instance() {{(pid=61649) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1862.436868] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f493503f4c494ab482ccaf45fc548e1e [ 1862.437358] env[61649]: DEBUG nova.compute.manager [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Start building block device mappings for instance. 
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1862.438897] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 499c7367bc5a4a4cb7a438fd69e3e7c5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1862.465150] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 499c7367bc5a4a4cb7a438fd69e3e7c5 [ 1862.468072] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 9a0bccbcb6ab4db4beb9d864bdb44abe in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1862.471510] env[61649]: DEBUG nova.policy [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4eecfef918474dc8ad298d9eb189f56f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3939f446f6f04aa08a0b91101e55572b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61649) authorize /opt/stack/nova/nova/policy.py:203}} [ 1862.500691] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9a0bccbcb6ab4db4beb9d864bdb44abe [ 1862.501774] env[61649]: DEBUG nova.compute.manager [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Start spawning the instance on the hypervisor. 
{{(pid=61649) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1862.523938] env[61649]: DEBUG nova.virt.hardware [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-04-02T10:03:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-04-02T10:03:42Z,direct_url=,disk_format='vmdk',id=d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d9c1bd4c77004c3cb8e42232cad1896c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-04-02T10:03:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1862.524195] env[61649]: DEBUG nova.virt.hardware [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Flavor limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1862.524354] env[61649]: DEBUG nova.virt.hardware [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Image limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1862.524534] env[61649]: DEBUG nova.virt.hardware [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Flavor pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1862.524679] env[61649]: DEBUG nova.virt.hardware [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Image pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1862.524827] env[61649]: DEBUG nova.virt.hardware [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1862.525033] env[61649]: DEBUG nova.virt.hardware [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1862.525191] env[61649]: DEBUG nova.virt.hardware [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1862.525355] env[61649]: DEBUG nova.virt.hardware [None 
req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Got 1 possible topologies {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1862.525516] env[61649]: DEBUG nova.virt.hardware [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1862.525688] env[61649]: DEBUG nova.virt.hardware [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1862.526522] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74ace418-69e4-44c7-bd80-5d472c490f9c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.534170] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0616dfcb-90db-4ff5-b252-9477e9ea5539 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.765669] env[61649]: DEBUG nova.network.neutron [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Successfully created port: 5e0a5bd0-be94-4f00-9107-76f3e92e2c6b {{(pid=61649) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1862.929934] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-291e190b-9ba3-417e-88ac-a5ac01d0c987 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 2ad56e2255764b808865567e3b76a73d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1862.939083] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2ad56e2255764b808865567e3b76a73d [ 1862.939864] env[61649]: DEBUG oslo_concurrency.lockutils [None req-291e190b-9ba3-417e-88ac-a5ac01d0c987 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquiring lock "8b1cd843-48b0-4e85-93fa-32ddd6e32883" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1863.322483] env[61649]: DEBUG nova.network.neutron [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Successfully updated port: 5e0a5bd0-be94-4f00-9107-76f3e92e2c6b {{(pid=61649) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1863.323013] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg ffb58b8b312644b692a44d0a092d6bdd in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1863.331360] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC 
response for msg ffb58b8b312644b692a44d0a092d6bdd [ 1863.331857] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquiring lock "refresh_cache-d8503feb-d1df-4e1f-8357-e080e8bdb174" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1863.332057] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquired lock "refresh_cache-d8503feb-d1df-4e1f-8357-e080e8bdb174" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1863.332203] env[61649]: DEBUG nova.network.neutron [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Building network info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1863.332608] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 98035835ff8a408699e25ae3af9192ae in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1863.341983] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 98035835ff8a408699e25ae3af9192ae [ 1863.386416] env[61649]: DEBUG nova.network.neutron [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Instance cache missing network info. 
{{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1863.561676] env[61649]: DEBUG nova.network.neutron [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Updating instance_info_cache with network_info: [{"id": "5e0a5bd0-be94-4f00-9107-76f3e92e2c6b", "address": "fa:16:3e:e0:b0:97", "network": {"id": "e70549f5-8c32-456f-9488-fdfff63b2dc8", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2020544300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3939f446f6f04aa08a0b91101e55572b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b29df12-5674-476d-a9e5-5e20f704d224", "external-id": "nsx-vlan-transportzone-754", "segmentation_id": 754, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e0a5bd0-be", "ovs_interfaceid": "5e0a5bd0-be94-4f00-9107-76f3e92e2c6b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1863.562203] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 9d2b7e9a0ed74280a7b9391e66b22199 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1863.575716] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9d2b7e9a0ed74280a7b9391e66b22199 [ 1863.576331] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Releasing lock "refresh_cache-d8503feb-d1df-4e1f-8357-e080e8bdb174" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1863.576604] env[61649]: DEBUG nova.compute.manager [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Instance network_info: |[{"id": "5e0a5bd0-be94-4f00-9107-76f3e92e2c6b", "address": "fa:16:3e:e0:b0:97", "network": {"id": "e70549f5-8c32-456f-9488-fdfff63b2dc8", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2020544300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3939f446f6f04aa08a0b91101e55572b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"8b29df12-5674-476d-a9e5-5e20f704d224", "external-id": "nsx-vlan-transportzone-754", "segmentation_id": 754, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e0a5bd0-be", "ovs_interfaceid": "5e0a5bd0-be94-4f00-9107-76f3e92e2c6b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1863.577027] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e0:b0:97', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8b29df12-5674-476d-a9e5-5e20f704d224', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5e0a5bd0-be94-4f00-9107-76f3e92e2c6b', 'vif_model': 'vmxnet3'}] {{(pid=61649) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1863.584557] env[61649]: DEBUG oslo.service.loopingcall [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1863.584999] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Creating VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1863.585221] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4995913c-f990-4526-b52f-1e98ad1a7b8b {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.604910] env[61649]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1863.604910] env[61649]: value = "task-158283" [ 1863.604910] env[61649]: _type = "Task" [ 1863.604910] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1863.612155] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158283, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1863.929578] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1863.929806] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Starting heal instance info cache {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 1863.929932] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Rebuilding the list of instances to heal {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 1863.930514] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg fc2f3c000acd49379a057a19f5142dbd in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1863.947430] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fc2f3c000acd49379a057a19f5142dbd [ 1863.950271] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1863.950429] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1863.950559] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1863.950704] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1863.950837] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1863.950960] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1863.951082] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Skipping network cache update for instance because it is Building. 
{{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1863.951201] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1863.951439] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1863.951439] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1863.951569] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Didn't find any instances for network info cache update. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 1863.952020] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1863.974437] env[61649]: DEBUG nova.compute.manager [req-759a4b5d-e03f-4c59-8bd1-2e2ea72857e0 req-6b18aed6-a697-40a2-9983-ab4bd5ad7d9c service nova] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Received event network-vif-plugged-5e0a5bd0-be94-4f00-9107-76f3e92e2c6b {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1863.974637] env[61649]: DEBUG oslo_concurrency.lockutils [req-759a4b5d-e03f-4c59-8bd1-2e2ea72857e0 req-6b18aed6-a697-40a2-9983-ab4bd5ad7d9c service nova] Acquiring lock "d8503feb-d1df-4e1f-8357-e080e8bdb174-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1863.974841] env[61649]: DEBUG oslo_concurrency.lockutils [req-759a4b5d-e03f-4c59-8bd1-2e2ea72857e0 req-6b18aed6-a697-40a2-9983-ab4bd5ad7d9c service nova] Lock "d8503feb-d1df-4e1f-8357-e080e8bdb174-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1863.975008] env[61649]: DEBUG oslo_concurrency.lockutils [req-759a4b5d-e03f-4c59-8bd1-2e2ea72857e0 req-6b18aed6-a697-40a2-9983-ab4bd5ad7d9c service nova] Lock "d8503feb-d1df-4e1f-8357-e080e8bdb174-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1863.975173] env[61649]: DEBUG nova.compute.manager [req-759a4b5d-e03f-4c59-8bd1-2e2ea72857e0 req-6b18aed6-a697-40a2-9983-ab4bd5ad7d9c service nova] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] No waiting events found dispatching network-vif-plugged-5e0a5bd0-be94-4f00-9107-76f3e92e2c6b {{(pid=61649) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1863.975335] 
env[61649]: WARNING nova.compute.manager [req-759a4b5d-e03f-4c59-8bd1-2e2ea72857e0 req-6b18aed6-a697-40a2-9983-ab4bd5ad7d9c service nova] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Received unexpected event network-vif-plugged-5e0a5bd0-be94-4f00-9107-76f3e92e2c6b for instance with vm_state building and task_state spawning. [ 1863.975493] env[61649]: DEBUG nova.compute.manager [req-759a4b5d-e03f-4c59-8bd1-2e2ea72857e0 req-6b18aed6-a697-40a2-9983-ab4bd5ad7d9c service nova] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Received event network-changed-5e0a5bd0-be94-4f00-9107-76f3e92e2c6b {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1863.975645] env[61649]: DEBUG nova.compute.manager [req-759a4b5d-e03f-4c59-8bd1-2e2ea72857e0 req-6b18aed6-a697-40a2-9983-ab4bd5ad7d9c service nova] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Refreshing instance network info cache due to event network-changed-5e0a5bd0-be94-4f00-9107-76f3e92e2c6b. {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 1863.975827] env[61649]: DEBUG oslo_concurrency.lockutils [req-759a4b5d-e03f-4c59-8bd1-2e2ea72857e0 req-6b18aed6-a697-40a2-9983-ab4bd5ad7d9c service nova] Acquiring lock "refresh_cache-d8503feb-d1df-4e1f-8357-e080e8bdb174" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1863.975960] env[61649]: DEBUG oslo_concurrency.lockutils [req-759a4b5d-e03f-4c59-8bd1-2e2ea72857e0 req-6b18aed6-a697-40a2-9983-ab4bd5ad7d9c service nova] Acquired lock "refresh_cache-d8503feb-d1df-4e1f-8357-e080e8bdb174" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1863.976131] env[61649]: DEBUG nova.network.neutron [req-759a4b5d-e03f-4c59-8bd1-2e2ea72857e0 req-6b18aed6-a697-40a2-9983-ab4bd5ad7d9c service nova] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Refreshing network info cache for port 5e0a5bd0-be94-4f00-9107-76f3e92e2c6b {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 1863.976591] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-759a4b5d-e03f-4c59-8bd1-2e2ea72857e0 req-6b18aed6-a697-40a2-9983-ab4bd5ad7d9c service nova] Expecting reply to msg 69262769d47b493b9b0f8054233af63a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1863.983385] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 69262769d47b493b9b0f8054233af63a [ 1864.117871] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158283, 'name': CreateVM_Task, 'duration_secs': 0.286205} completed successfully. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1864.118071] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Created VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1864.127820] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1864.128028] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1864.128423] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1864.128675] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64c74975-91f4-4a98-a468-ba2e1d31cd2e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.133275] env[61649]: DEBUG oslo_vmware.api [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Waiting for the task: (returnval){ [ 1864.133275] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]528bb086-5aeb-929c-9e1d-10a202bc5921" [ 1864.133275] env[61649]: _type = "Task" [ 1864.133275] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1864.140764] env[61649]: DEBUG oslo_vmware.api [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]528bb086-5aeb-929c-9e1d-10a202bc5921, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.403883] env[61649]: DEBUG nova.network.neutron [req-759a4b5d-e03f-4c59-8bd1-2e2ea72857e0 req-6b18aed6-a697-40a2-9983-ab4bd5ad7d9c service nova] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Updated VIF entry in instance network info cache for port 5e0a5bd0-be94-4f00-9107-76f3e92e2c6b. 
{{(pid=61649) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 1864.404251] env[61649]: DEBUG nova.network.neutron [req-759a4b5d-e03f-4c59-8bd1-2e2ea72857e0 req-6b18aed6-a697-40a2-9983-ab4bd5ad7d9c service nova] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Updating instance_info_cache with network_info: [{"id": "5e0a5bd0-be94-4f00-9107-76f3e92e2c6b", "address": "fa:16:3e:e0:b0:97", "network": {"id": "e70549f5-8c32-456f-9488-fdfff63b2dc8", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2020544300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3939f446f6f04aa08a0b91101e55572b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b29df12-5674-476d-a9e5-5e20f704d224", "external-id": "nsx-vlan-transportzone-754", "segmentation_id": 754, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e0a5bd0-be", "ovs_interfaceid": "5e0a5bd0-be94-4f00-9107-76f3e92e2c6b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1864.404749] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-759a4b5d-e03f-4c59-8bd1-2e2ea72857e0 req-6b18aed6-a697-40a2-9983-ab4bd5ad7d9c service nova] Expecting reply to msg fd084c14b1754a0f9f1fa0c5eed82e59 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1864.413239] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fd084c14b1754a0f9f1fa0c5eed82e59 [ 1864.413791] env[61649]: DEBUG oslo_concurrency.lockutils [req-759a4b5d-e03f-4c59-8bd1-2e2ea72857e0 req-6b18aed6-a697-40a2-9983-ab4bd5ad7d9c service nova] Releasing lock "refresh_cache-d8503feb-d1df-4e1f-8357-e080e8bdb174" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1864.643627] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1864.643936] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Processing image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1864.644174] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1865.928646] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1865.929101] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 84b60bb863234eeead540fc963316dcc in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1865.938791] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 84b60bb863234eeead540fc963316dcc [ 1865.939836] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1865.940068] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1865.940232] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1865.940394] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61649) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1865.941919] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d948a37-2eee-4de8-aaf8-4657307feefb {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.950204] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9e1cea0-c8ea-4ee7-bf16-1efd17a03f45 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.966079] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b4af224-6183-474e-8f90-bb4dc68540e2 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.972502] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e29f9af1-aede-4bf2-9af7-6538013043cc {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.001693] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181828MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61649) _report_hypervisor_resource_view 
/opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1866.001840] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1866.002058] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1866.002980] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 36c0ca8f7b09405197eec9387afd3153 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1866.036100] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 36c0ca8f7b09405197eec9387afd3153 [ 1866.040021] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg f4668820b11047cd998a523fc5c49d8e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1866.048536] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f4668820b11047cd998a523fc5c49d8e [ 1866.066839] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 4b87e74a-2408-466f-b1c2-68330c31fb9d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1866.066839] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance ff225293-ad72-499a-9b5b-147d0bc40350 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1866.066839] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 5f424618-f9b3-4e9a-898c-2d1a07476cc7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1866.066839] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 4661732c-51dc-4a77-aa32-28049dbd5ad7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1866.067057] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 545947a4-3f1a-44fe-ac02-ec5e2e5844d5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1866.067057] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1866.067057] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 28a3b287-8717-42d5-989a-4f66642134f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1866.067057] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 4e47e82d-780e-4c23-8071-083beab2a53f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1866.067189] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 8b1cd843-48b0-4e85-93fa-32ddd6e32883 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1866.067189] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance d8503feb-d1df-4e1f-8357-e080e8bdb174 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1866.067322] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 902d4d42c1734039ba64f6c147a3be57 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1866.077216] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 902d4d42c1734039ba64f6c147a3be57 [ 1866.077939] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance f0e69971-df47-4ef0-85c9-ac686e4a4f9d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1866.078166] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1866.078311] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1866.215998] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aff8b20-cf46-472e-aeba-3c3a18382c75 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.220368] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7547373c-c9c1-4d57-b1ae-2f995198795b {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.250573] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-661a3186-0a76-4349-a7a1-4f7a30fdcd16 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.257628] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-923caafb-a42b-486e-a0d5-f1eb9835e6d9 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.270556] env[61649]: DEBUG nova.compute.provider_tree [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1866.271142] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 918438ee513b439c8d6a995d238c0d85 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1866.278571] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 918438ee513b439c8d6a995d238c0d85 [ 1866.279514] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1866.281964] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 3cd350cdbb694cf9ac364fc533c7d915 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1866.292258] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3cd350cdbb694cf9ac364fc533c7d915 
[ 1866.292863] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61649) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1866.293040] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.291s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1867.293562] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1873.923932] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1873.924625] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg a8f5ee6f71de42d8925b3897ab5599a7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1873.941388] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a8f5ee6f71de42d8925b3897ab5599a7 [ 1905.089670] env[61649]: DEBUG oslo_concurrency.lockutils [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Acquiring lock "4d429147-d3fe-4d99-af2a-e28a3829f434" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1905.090044] env[61649]: DEBUG oslo_concurrency.lockutils [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Lock "4d429147-d3fe-4d99-af2a-e28a3829f434" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1910.926740] env[61649]: WARNING oslo_vmware.rw_handles [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1910.926740] env[61649]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1910.926740] env[61649]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1910.926740] env[61649]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1910.926740] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1910.926740] env[61649]: ERROR oslo_vmware.rw_handles response.begin() [ 1910.926740] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 
1910.926740] env[61649]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1910.926740] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1910.926740] env[61649]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1910.926740] env[61649]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1910.926740] env[61649]: ERROR oslo_vmware.rw_handles [ 1910.927759] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Downloaded image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to vmware_temp/5ee811f1-e831-4220-b7d8-7a3b07c0bff1/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1910.929219] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Caching image {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1910.929465] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Copying Virtual Disk [datastore1] vmware_temp/5ee811f1-e831-4220-b7d8-7a3b07c0bff1/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk to [datastore1] vmware_temp/5ee811f1-e831-4220-b7d8-7a3b07c0bff1/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk {{(pid=61649) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1910.929794] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6bd603fc-4404-4299-8e77-184807b1135b {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.941659] env[61649]: DEBUG oslo_vmware.api [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Waiting for the task: (returnval){ [ 1910.941659] env[61649]: value = "task-158284" [ 1910.941659] env[61649]: _type = "Task" [ 1910.941659] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1910.949752] env[61649]: DEBUG oslo_vmware.api [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Task: {'id': task-158284, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1911.451862] env[61649]: DEBUG oslo_vmware.exceptions [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Fault InvalidArgument not matched. 
{{(pid=61649) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1911.452205] env[61649]: DEBUG oslo_concurrency.lockutils [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1911.452760] env[61649]: ERROR nova.compute.manager [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1911.452760] env[61649]: Faults: ['InvalidArgument'] [ 1911.452760] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Traceback (most recent call last): [ 1911.452760] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1911.452760] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] yield resources [ 1911.452760] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1911.452760] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] self.driver.spawn(context, instance, image_meta, [ 1911.452760] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1911.452760] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1911.452760] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1911.452760] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] self._fetch_image_if_missing(context, vi) [ 1911.452760] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1911.452760] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] image_cache(vi, tmp_image_ds_loc) [ 1911.453469] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1911.453469] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] vm_util.copy_virtual_disk( [ 1911.453469] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1911.453469] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] session._wait_for_task(vmdk_copy_task) [ 1911.453469] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 1911.453469] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] return self.wait_for_task(task_ref) [ 1911.453469] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1911.453469] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] return evt.wait() [ 1911.453469] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1911.453469] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] result = hub.switch() [ 1911.453469] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1911.453469] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] return self.greenlet.switch() [ 1911.453469] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1911.454094] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] self.f(*self.args, **self.kw) [ 1911.454094] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1911.454094] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] raise exceptions.translate_fault(task_info.error) [ 1911.454094] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1911.454094] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Faults: ['InvalidArgument'] [ 1911.454094] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] [ 1911.454094] env[61649]: INFO nova.compute.manager [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Terminating instance [ 1911.455221] env[61649]: DEBUG oslo_concurrency.lockutils [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1911.455221] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1911.455221] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d34dee5e-8941-4c7c-b72e-35c0013723e9 {{(pid=61649) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.457290] env[61649]: DEBUG nova.compute.manager [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1911.457485] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1911.458203] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-109acc81-598a-43ae-a147-c15eac0941f0 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.466221] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Unregistering the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1911.466439] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bed487dc-e15a-4e9f-862c-e46a632a8a77 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.468555] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1911.468729] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61649) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1911.469688] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16ee502b-e371-4235-9c70-2a047345f48f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.474353] env[61649]: DEBUG oslo_vmware.api [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Waiting for the task: (returnval){ [ 1911.474353] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]528869cb-235b-9eaa-0bfe-14a54ee0857e" [ 1911.474353] env[61649]: _type = "Task" [ 1911.474353] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1911.482920] env[61649]: DEBUG oslo_vmware.api [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]528869cb-235b-9eaa-0bfe-14a54ee0857e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1911.535483] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Unregistered the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1911.535695] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Deleting contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1911.535879] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Deleting the datastore file [datastore1] 4b87e74a-2408-466f-b1c2-68330c31fb9d {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1911.537107] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cf9bb014-a71e-4519-910c-cfde6bccad1d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.543784] env[61649]: DEBUG oslo_vmware.api [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Waiting for the task: (returnval){ [ 1911.543784] env[61649]: value = "task-158286" [ 1911.543784] env[61649]: _type = "Task" [ 1911.543784] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1911.551392] env[61649]: DEBUG oslo_vmware.api [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Task: {'id': task-158286, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1911.984791] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Preparing fetch location {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1911.985160] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Creating directory with path [datastore1] vmware_temp/9ff9b526-bca3-4dff-8dc9-5a8e01883784/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1911.985276] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7729a67e-c749-4d97-84bd-1ce8e02779cb {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.996517] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Created directory with path [datastore1] vmware_temp/9ff9b526-bca3-4dff-8dc9-5a8e01883784/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1911.996717] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Fetch image to [datastore1] vmware_temp/9ff9b526-bca3-4dff-8dc9-5a8e01883784/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1911.996892] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to [datastore1] vmware_temp/9ff9b526-bca3-4dff-8dc9-5a8e01883784/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1911.997650] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c8a2108-79ab-4229-8f1c-aad1e021946f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.003963] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00611be0-9014-4dd5-84ef-046dce605bb6 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.012619] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5219996-c058-48fa-91aa-5936ee52d6b9 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.042455] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-c1d2a7f2-b2ce-43ab-9eea-6a6cf6c62582 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.052620] env[61649]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-f0d4b61c-f154-4a9f-b40c-7585589231f2 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.054313] env[61649]: DEBUG oslo_vmware.api [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Task: {'id': task-158286, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.065436} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1912.054572] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Deleted the datastore file {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1912.054755] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Deleted contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1912.054928] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1912.055105] env[61649]: INFO nova.compute.manager [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Took 0.60 seconds to destroy the instance on the hypervisor. 
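The DeleteDatastoreFile_Task sequence just above (invoke, poll at 0%, then 'completed successfully' with duration_secs) is the standard oslo.vmware task cycle, and the VimFaultException at the top of this section is raised from the same _poll_task path when TaskInfo reports an error. A minimal sketch of that cycle; the vCenter host, credentials, and datastore path below are placeholders, not taken from this deployment:

    from oslo_vmware import api

    # Placeholders only: host, credentials, retry and poll settings.
    session = api.VMwareAPISession('vc.example.org', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    # A *_Task method returns a task moref immediately; wait_for_task() then
    # polls TaskInfo in a looping call until the state is 'success', or raises
    # the translated fault (e.g. the VimFaultException / InvalidArgument above).
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              session.vim.service_content.fileManager,
                              name='[datastore1] some-instance-dir',  # placeholder
                              datacenter=None)
    session.wait_for_task(task)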
[ 1912.057148] env[61649]: DEBUG nova.compute.claims [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Aborting claim: {{(pid=61649) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1912.057328] env[61649]: DEBUG oslo_concurrency.lockutils [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1912.057560] env[61649]: DEBUG oslo_concurrency.lockutils [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1912.059371] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg 6cfb5e1bbe1f4cb8a430e5b654015992 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1912.077480] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1912.090075] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6cfb5e1bbe1f4cb8a430e5b654015992 [ 1912.220626] env[61649]: DEBUG oslo_concurrency.lockutils [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1912.221409] env[61649]: ERROR nova.compute.manager [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11. 
[ 1912.221409] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Traceback (most recent call last): [ 1912.221409] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1912.221409] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1912.221409] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1912.221409] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] result = getattr(controller, method)(*args, **kwargs) [ 1912.221409] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1912.221409] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] return self._get(image_id) [ 1912.221409] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1912.221409] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1912.221409] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1912.221835] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] resp, body = self.http_client.get(url, headers=header) [ 1912.221835] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1912.221835] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] return self.request(url, 'GET', **kwargs) [ 1912.221835] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1912.221835] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] return self._handle_response(resp) [ 1912.221835] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1912.221835] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] raise exc.from_response(resp, resp.content) [ 1912.221835] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
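The inner traceback above ends at glanceclient's raw HTTP 401; the outer traceback that follows shows nova/image/glance.py re-raising it as the Nova-level ImageNotAuthorized, so compute code only has to handle one exception family. A minimal sketch of that translation pattern — the wrapper below is illustrative, not Nova's actual code:

    from glanceclient import exc as glance_exc

    class ImageNotAuthorized(Exception):
        """Stand-in for nova.exception.ImageNotAuthorized."""

    def show(client, image_id):
        try:
            # glanceclient GET /v2/images/<id>; raises HTTPUnauthorized on 401
            return client.images.get(image_id)
        except glance_exc.HTTPUnauthorized as e:
            # Re-raise translated, preserving the original traceback, as
            # _reraise_translated_image_exception does in the outer trace below.
            raise ImageNotAuthorized(
                'Not authorized for image %s.' % image_id) from e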
[ 1912.221835] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] [ 1912.221835] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] During handling of the above exception, another exception occurred: [ 1912.221835] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] [ 1912.221835] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Traceback (most recent call last): [ 1912.222244] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1912.222244] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] yield resources [ 1912.222244] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1912.222244] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] self.driver.spawn(context, instance, image_meta, [ 1912.222244] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1912.222244] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1912.222244] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1912.222244] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] self._fetch_image_if_missing(context, vi) [ 1912.222244] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1912.222244] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] image_fetch(context, vi, tmp_image_ds_loc) [ 1912.222244] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1912.222244] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] images.fetch_image( [ 1912.222244] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1912.222708] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] metadata = IMAGE_API.get(context, image_ref) [ 1912.222708] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1912.222708] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] return session.show(context, image_id, [ 1912.222708] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1912.222708] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] _reraise_translated_image_exception(image_id) [ 1912.222708] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File 
"/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1912.222708] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] raise new_exc.with_traceback(exc_trace) [ 1912.222708] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1912.222708] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1912.222708] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1912.222708] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] result = getattr(controller, method)(*args, **kwargs) [ 1912.222708] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1912.222708] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] return self._get(image_id) [ 1912.223152] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1912.223152] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1912.223152] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1912.223152] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] resp, body = self.http_client.get(url, headers=header) [ 1912.223152] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1912.223152] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] return self.request(url, 'GET', **kwargs) [ 1912.223152] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1912.223152] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] return self._handle_response(resp) [ 1912.223152] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1912.223152] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] raise exc.from_response(resp, resp.content) [ 1912.223152] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] nova.exception.ImageNotAuthorized: Not authorized for image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11. 
[ 1912.223152] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] [ 1912.223507] env[61649]: INFO nova.compute.manager [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Terminating instance [ 1912.223507] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1912.223507] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1912.223961] env[61649]: DEBUG nova.compute.manager [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1912.224165] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1912.224395] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9864ec97-e20e-426c-9aa6-d3b8ac5c81c3 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.228506] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ec2360a-a900-41c2-a36a-7bdd2feac9f4 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.237110] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Unregistering the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1912.237325] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a164f1f6-460e-41e1-ae0e-7f811e0d14af {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.239388] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1912.239562] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 
tempest-DeleteServersTestJSON-713044590-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61649) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1912.240521] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87f7bb72-1043-4800-bc0d-2418a844db6b {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.243724] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8868dec9-d549-40d1-b780-e63821571b4a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.248312] env[61649]: DEBUG oslo_vmware.api [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Waiting for the task: (returnval){ [ 1912.248312] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52d75a40-1b5e-1ae4-4918-c523ed6280f4" [ 1912.248312] env[61649]: _type = "Task" [ 1912.248312] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1912.253237] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dd47fd9-e193-4903-954d-8fc3e0765117 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.260891] env[61649]: DEBUG oslo_vmware.api [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52d75a40-1b5e-1ae4-4918-c523ed6280f4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1912.286741] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cad1700-636e-4928-b418-41b465250acf {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.294097] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2daa0d19-4960-4c1d-922c-5aed774c1ef6 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.301207] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Unregistered the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1912.301431] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Deleting contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1912.301615] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Deleting the datastore file [datastore1] ff225293-ad72-499a-9b5b-147d0bc40350 {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1912.311177] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0ce62cb2-0c5d-445f-9ffa-a43db7ea4813 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.313063] env[61649]: DEBUG nova.compute.provider_tree [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1912.313543] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg a79c0f7017a24487b077b939530296ac in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1912.317802] env[61649]: DEBUG oslo_vmware.api [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Waiting for the task: (returnval){ [ 1912.317802] env[61649]: value = "task-158288" [ 1912.317802] env[61649]: _type = "Task" [ 1912.317802] env[61649]: } to complete. 
{{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1912.321697] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a79c0f7017a24487b077b939530296ac [ 1912.322624] env[61649]: DEBUG nova.scheduler.client.report [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1912.324953] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg 19cf9eaf57d5476794455aa7ea348d26 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1912.329059] env[61649]: DEBUG oslo_vmware.api [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Task: {'id': task-158288, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1912.335774] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 19cf9eaf57d5476794455aa7ea348d26 [ 1912.336631] env[61649]: DEBUG oslo_concurrency.lockutils [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.279s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1912.337271] env[61649]: ERROR nova.compute.manager [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1912.337271] env[61649]: Faults: ['InvalidArgument'] [ 1912.337271] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Traceback (most recent call last): [ 1912.337271] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1912.337271] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] self.driver.spawn(context, instance, image_meta, [ 1912.337271] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1912.337271] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1912.337271] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1912.337271] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] self._fetch_image_if_missing(context, vi) [ 1912.337271] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1912.337271] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] image_cache(vi, tmp_image_ds_loc) [ 1912.337271] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1912.337724] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] vm_util.copy_virtual_disk( [ 1912.337724] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1912.337724] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] session._wait_for_task(vmdk_copy_task) [ 1912.337724] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1912.337724] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] return self.wait_for_task(task_ref) [ 1912.337724] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1912.337724] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] return evt.wait() [ 1912.337724] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1912.337724] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] result = hub.switch() [ 1912.337724] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1912.337724] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] return self.greenlet.switch() [ 1912.337724] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1912.337724] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] self.f(*self.args, **self.kw) [ 1912.338113] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1912.338113] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] raise exceptions.translate_fault(task_info.error) [ 1912.338113] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1912.338113] env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Faults: ['InvalidArgument'] [ 1912.338113] 
env[61649]: ERROR nova.compute.manager [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] [ 1912.338568] env[61649]: DEBUG nova.compute.utils [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] VimFaultException {{(pid=61649) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1912.340345] env[61649]: DEBUG nova.compute.manager [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Build of instance 4b87e74a-2408-466f-b1c2-68330c31fb9d was re-scheduled: A specified parameter was not correct: fileType [ 1912.340345] env[61649]: Faults: ['InvalidArgument'] {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1912.340727] env[61649]: DEBUG nova.compute.manager [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Unplugging VIFs for instance {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1912.340899] env[61649]: DEBUG nova.compute.manager [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1912.341082] env[61649]: DEBUG nova.compute.manager [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1912.341250] env[61649]: DEBUG nova.network.neutron [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1912.582846] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg 602e73bf70624c999ca2541d065b7cb4 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1912.593125] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 602e73bf70624c999ca2541d065b7cb4 [ 1912.594153] env[61649]: DEBUG nova.network.neutron [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1912.594153] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg ea4964f985e941188f66ee402a054dd4 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1912.603038] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC 
response for msg ea4964f985e941188f66ee402a054dd4 [ 1912.603594] env[61649]: INFO nova.compute.manager [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Took 0.26 seconds to deallocate network for instance. [ 1912.605341] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg f49ed9bc142c4774a32dcec943afa26e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1912.636791] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f49ed9bc142c4774a32dcec943afa26e [ 1912.639660] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg 8270e25c5d894a06bc68b0ccd93b0f81 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1912.670391] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8270e25c5d894a06bc68b0ccd93b0f81 [ 1912.688828] env[61649]: INFO nova.scheduler.client.report [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Deleted allocations for instance 4b87e74a-2408-466f-b1c2-68330c31fb9d [ 1912.696626] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg 9d2435e7c8da468f855f7412d2269c95 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1912.710108] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9d2435e7c8da468f855f7412d2269c95 [ 1912.710965] env[61649]: DEBUG oslo_concurrency.lockutils [None req-98c20f81-2229-45eb-9c58-1b009b8580a0 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Lock "4b87e74a-2408-466f-b1c2-68330c31fb9d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 675.898s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1912.711500] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 333825efc9bf4199b54ff6b875642ed5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1912.712504] env[61649]: DEBUG oslo_concurrency.lockutils [None req-984d959f-f346-4c6a-8a1a-6d36c724a9ef tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Lock "4b87e74a-2408-466f-b1c2-68330c31fb9d" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 480.524s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1912.712733] env[61649]: DEBUG oslo_concurrency.lockutils [None req-984d959f-f346-4c6a-8a1a-6d36c724a9ef tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Acquiring lock "4b87e74a-2408-466f-b1c2-68330c31fb9d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61649) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1912.712935] env[61649]: DEBUG oslo_concurrency.lockutils [None req-984d959f-f346-4c6a-8a1a-6d36c724a9ef tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Lock "4b87e74a-2408-466f-b1c2-68330c31fb9d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1912.713481] env[61649]: DEBUG oslo_concurrency.lockutils [None req-984d959f-f346-4c6a-8a1a-6d36c724a9ef tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Lock "4b87e74a-2408-466f-b1c2-68330c31fb9d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1912.715449] env[61649]: INFO nova.compute.manager [None req-984d959f-f346-4c6a-8a1a-6d36c724a9ef tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Terminating instance [ 1912.717618] env[61649]: DEBUG nova.compute.manager [None req-984d959f-f346-4c6a-8a1a-6d36c724a9ef tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1912.717794] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-984d959f-f346-4c6a-8a1a-6d36c724a9ef tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1912.718269] env[61649]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-49fd08a4-b51d-4a11-8d5e-96bcc7c12589 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.727633] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa896416-0a41-4a1d-93c9-54543f7f62cd {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.738007] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 333825efc9bf4199b54ff6b875642ed5 [ 1912.738502] env[61649]: DEBUG nova.compute.manager [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Starting instance...
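The 'Acquiring lock / acquired ... waited / released ... held' triplets here and throughout the log come from oslo_concurrency.lockutils' inner wrapper; the qualnames it prints (e.g. clear_events_for_instance.<locals>._clear_events) are the decorated nested functions. A minimal sketch of the two usual entry points, with a hypothetical function and the lock names patterned on the log:

    from oslo_concurrency import lockutils

    # Decorator form: the body runs with the named in-process lock held, and
    # the wrapper logs the waited/held durations seen above.
    @lockutils.synchronized('compute_resources')
    def abort_instance_claim():
        pass

    # Context-manager form, as used for the per-instance "-events" lock.
    with lockutils.lock('4b87e74a-2408-466f-b1c2-68330c31fb9d-events'):
        abort_instance_claim()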
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1912.740214] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 1a28a7e574f3464b861903f4e057e8cc in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1912.759145] env[61649]: WARNING nova.virt.vmwareapi.vmops [None req-984d959f-f346-4c6a-8a1a-6d36c724a9ef tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4b87e74a-2408-466f-b1c2-68330c31fb9d could not be found. [ 1912.759145] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-984d959f-f346-4c6a-8a1a-6d36c724a9ef tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1912.759145] env[61649]: INFO nova.compute.manager [None req-984d959f-f346-4c6a-8a1a-6d36c724a9ef tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1912.759145] env[61649]: DEBUG oslo.service.loopingcall [None req-984d959f-f346-4c6a-8a1a-6d36c724a9ef tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1912.760994] env[61649]: DEBUG nova.compute.manager [-] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1912.761090] env[61649]: DEBUG nova.network.neutron [-] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1912.767658] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Preparing fetch location {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1912.767891] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Creating directory with path [datastore1] vmware_temp/0f59bf6b-70c4-4aff-83f5-d58d4ecf2d2f/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1912.768124] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-93204254-4c99-4fff-9f0c-c7aba4ad7dfe {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.774663] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1a28a7e574f3464b861903f4e057e8cc [ 1912.780315] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e65f57c-30cd-4243-b9a2-52b8be761232
tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Created directory with path [datastore1] vmware_temp/0f59bf6b-70c4-4aff-83f5-d58d4ecf2d2f/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1912.780529] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Fetch image to [datastore1] vmware_temp/0f59bf6b-70c4-4aff-83f5-d58d4ecf2d2f/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1912.780772] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to [datastore1] vmware_temp/0f59bf6b-70c4-4aff-83f5-d58d4ecf2d2f/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1912.781440] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33fe6bea-e896-4b09-af43-1d761f7ad598 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.785654] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg e06eba6cf9c341fe8c5a9118a0562d9b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1912.788856] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aef95af-b7f0-4641-9492-732c58ec1bc3 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.791675] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1912.791896] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1912.793323] env[61649]: INFO nova.compute.claims [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1912.794839] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 8b1fc409eb324ea5af64d115a81fe214 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1912.796194] env[61649]: 
INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e06eba6cf9c341fe8c5a9118a0562d9b [ 1912.796928] env[61649]: DEBUG nova.network.neutron [-] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1912.797246] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg a89dae107e8b4adda88d5d426976330a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1912.803391] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a16da66-2591-4381-81e4-5745bbbaab0b {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.807284] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a89dae107e8b4adda88d5d426976330a [ 1912.807720] env[61649]: INFO nova.compute.manager [-] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] Took 0.05 seconds to deallocate network for instance. [ 1912.811327] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-984d959f-f346-4c6a-8a1a-6d36c724a9ef tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg e317bbaa6c9a4adf9e05ce930c7379d5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1912.841094] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-990b0160-5425-4e6e-8627-6ba426e35aac {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.843622] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8b1fc409eb324ea5af64d115a81fe214 [ 1912.845418] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg f15c9b7430b94ec3820e53a178a3b2a9 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1912.846388] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e317bbaa6c9a4adf9e05ce930c7379d5 [ 1912.854930] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f15c9b7430b94ec3820e53a178a3b2a9 [ 1912.855556] env[61649]: DEBUG oslo_vmware.api [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Task: {'id': task-158288, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.075481} completed successfully. 
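Each 'Expecting reply to msg <id> in queue reply_17c3d983... / Received RPC response for msg <id>' pair in this log is one oslo.messaging RPC round trip: the caller publishes a request carrying a message id, then blocks on the process-wide reply queue until the response with the matching id arrives. A rough client-side sketch; the transport URL, topic, and method name below are placeholders:

    import oslo_messaging as messaging
    from oslo_config import cfg

    transport = messaging.get_rpc_transport(
        cfg.CONF, url='rabbit://guest:guest@localhost:5672/')  # placeholder URL
    target = messaging.Target(topic='conductor')               # placeholder topic
    client = messaging.RPCClient(transport, target)

    # call() assigns the message id, publishes the request, and waits on the
    # shared reply_<uuid> queue for the response carrying that id.
    result = client.call({}, 'ping')  # 'ping' is a hypothetical method name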
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1912.857953] env[61649]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-87664e7f-04af-46fd-bc21-8dfdc1fa1253 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.859709] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Deleted the datastore file {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1912.859900] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Deleted contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1912.860095] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1912.860269] env[61649]: INFO nova.compute.manager [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Took 0.64 seconds to destroy the instance on the hypervisor. [ 1912.862214] env[61649]: DEBUG nova.compute.claims [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Aborting claim: {{(pid=61649) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1912.862383] env[61649]: DEBUG oslo_concurrency.lockutils [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1912.867863] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-984d959f-f346-4c6a-8a1a-6d36c724a9ef tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg f18ce0e36b2d4caeb3dcedea0f32621a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1912.880444] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1912.909820] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f18ce0e36b2d4caeb3dcedea0f32621a [ 1912.912685] env[61649]: DEBUG oslo_concurrency.lockutils [None req-984d959f-f346-4c6a-8a1a-6d36c724a9ef tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] 
Lock "4b87e74a-2408-466f-b1c2-68330c31fb9d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.200s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1912.913016] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-984d959f-f346-4c6a-8a1a-6d36c724a9ef tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg f3845c5801034cee9ca077a312d1a1a7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1912.913676] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "4b87e74a-2408-466f-b1c2-68330c31fb9d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 174.894s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1912.913860] env[61649]: INFO nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 4b87e74a-2408-466f-b1c2-68330c31fb9d] During sync_power_state the instance has a pending task (deleting). Skip. [ 1912.914034] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "4b87e74a-2408-466f-b1c2-68330c31fb9d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1912.922560] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f3845c5801034cee9ca077a312d1a1a7 [ 1912.934410] env[61649]: DEBUG oslo_vmware.rw_handles [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0f59bf6b-70c4-4aff-83f5-d58d4ecf2d2f/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1912.994386] env[61649]: DEBUG oslo_vmware.rw_handles [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Completed reading data from the image iterator. {{(pid=61649) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1912.994951] env[61649]: DEBUG oslo_vmware.rw_handles [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0f59bf6b-70c4-4aff-83f5-d58d4ecf2d2f/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61649) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1913.060902] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fcc4f94-0701-4dc7-9401-348900e6c2b9 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.068061] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c135b8a-971a-425a-8ec5-6f52e4daf128 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.097154] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0e1d68a-8f1d-4b33-bcb9-ba667b43c064 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.104339] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e047e987-ac6c-4e27-a5a6-5676a13152a3 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.117327] env[61649]: DEBUG nova.compute.provider_tree [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1913.117829] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 72196f0321b545559995a62d82f7262b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1913.125514] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 72196f0321b545559995a62d82f7262b [ 1913.126900] env[61649]: DEBUG nova.scheduler.client.report [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1913.129215] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 4efde999d14948f9ba8531169b3c368d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1913.140419] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4efde999d14948f9ba8531169b3c368d [ 1913.141159] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" 
:: held 0.349s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1913.141635] env[61649]: DEBUG nova.compute.manager [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Start building networks asynchronously for instance. {{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1913.143218] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg cb86ddb6356e41d4a464d83b041fd537 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1913.144060] env[61649]: DEBUG oslo_concurrency.lockutils [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.282s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1913.145737] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Expecting reply to msg 64b961f57f4e46fb9ad651e319310723 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1913.171845] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cb86ddb6356e41d4a464d83b041fd537 [ 1913.173148] env[61649]: DEBUG nova.compute.utils [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Using /dev/sd instead of None {{(pid=61649) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1913.173723] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg f25a7df85052417aa9c6416f0c9878d5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1913.174511] env[61649]: DEBUG nova.compute.manager [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Allocating IP information in the background. 
{{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1913.174678] env[61649]: DEBUG nova.network.neutron [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] allocate_for_instance() {{(pid=61649) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1913.177425] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 64b961f57f4e46fb9ad651e319310723 [ 1913.183113] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f25a7df85052417aa9c6416f0c9878d5 [ 1913.183624] env[61649]: DEBUG nova.compute.manager [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Start building block device mappings for instance. {{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1913.185319] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 310f8b1457b6447382b33652c62ce74e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1913.217326] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 310f8b1457b6447382b33652c62ce74e [ 1913.220337] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 96fcbbb65a9c4646b78b1d9efcbec500 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1913.222409] env[61649]: DEBUG nova.policy [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b8c6e9a3ba6a48669b1772886e22e023', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4a732894bf424b5e9e3e972af47a7314', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61649) authorize /opt/stack/nova/nova/policy.py:203}} [ 1913.251702] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 96fcbbb65a9c4646b78b1d9efcbec500 [ 1913.252940] env[61649]: DEBUG nova.compute.manager [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Start spawning the instance on the hypervisor. 
{{(pid=61649) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1913.277170] env[61649]: DEBUG nova.virt.hardware [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-04-02T10:03:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-04-02T10:03:42Z,direct_url=<?>,disk_format='vmdk',id=d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d9c1bd4c77004c3cb8e42232cad1896c',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-04-02T10:03:43Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1913.277422] env[61649]: DEBUG nova.virt.hardware [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Flavor limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1913.277607] env[61649]: DEBUG nova.virt.hardware [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Image limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1913.277798] env[61649]: DEBUG nova.virt.hardware [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Flavor pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1913.277947] env[61649]: DEBUG nova.virt.hardware [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Image pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1913.278093] env[61649]: DEBUG nova.virt.hardware [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1913.278299] env[61649]: DEBUG nova.virt.hardware [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1913.278460] env[61649]: DEBUG nova.virt.hardware [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1913.278623] env[61649]: DEBUG 
nova.virt.hardware [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Got 1 possible topologies {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1913.278786] env[61649]: DEBUG nova.virt.hardware [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1913.278956] env[61649]: DEBUG nova.virt.hardware [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1913.279980] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-972d21c1-e816-4187-9637-48e7b1e91c2a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.290824] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52ad593b-6bb6-40f7-a93b-4fd2c7da425b {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.334701] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4751efc-db96-4e26-a1de-2ac2c95d3cd2 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.342529] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9077dece-1654-47c3-8777-948d87d7589e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.372935] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-676da13d-f670-46e4-8e19-8e8a8e9f8c48 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.380480] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9b256c9-714e-4820-9470-0ec0844dfb0c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.395970] env[61649]: DEBUG nova.compute.provider_tree [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1913.396469] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Expecting reply to msg 9a4b762fcaa04489983d95b435847948 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1913.403677] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9a4b762fcaa04489983d95b435847948 [ 1913.405142] env[61649]: DEBUG nova.scheduler.client.report [None 
req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1913.406783] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Expecting reply to msg 1d9747cc97b348d698d05a6cb5c6c2ef in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1913.417109] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1d9747cc97b348d698d05a6cb5c6c2ef [ 1913.417781] env[61649]: DEBUG oslo_concurrency.lockutils [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.274s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1913.418464] env[61649]: ERROR nova.compute.manager [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11. 
[ 1913.418464] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Traceback (most recent call last): [ 1913.418464] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1913.418464] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1913.418464] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1913.418464] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] result = getattr(controller, method)(*args, **kwargs) [ 1913.418464] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1913.418464] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] return self._get(image_id) [ 1913.418464] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1913.418464] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1913.418464] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1913.418828] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] resp, body = self.http_client.get(url, headers=header) [ 1913.418828] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1913.418828] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] return self.request(url, 'GET', **kwargs) [ 1913.418828] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1913.418828] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] return self._handle_response(resp) [ 1913.418828] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1913.418828] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] raise exc.from_response(resp, resp.content) [ 1913.418828] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1913.418828] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] [ 1913.418828] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] During handling of the above exception, another exception occurred: [ 1913.418828] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] [ 1913.418828] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Traceback (most recent call last): [ 1913.419194] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1913.419194] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] self.driver.spawn(context, instance, image_meta, [ 1913.419194] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1913.419194] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1913.419194] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1913.419194] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] self._fetch_image_if_missing(context, vi) [ 1913.419194] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1913.419194] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] image_fetch(context, vi, tmp_image_ds_loc) [ 1913.419194] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1913.419194] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] images.fetch_image( [ 1913.419194] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1913.419194] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] metadata = IMAGE_API.get(context, image_ref) [ 1913.419194] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1913.419621] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] return session.show(context, image_id, [ 1913.419621] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1913.419621] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] _reraise_translated_image_exception(image_id) [ 1913.419621] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1913.419621] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] raise new_exc.with_traceback(exc_trace) [ 1913.419621] env[61649]: ERROR nova.compute.manager [instance: 
ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1913.419621] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1913.419621] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1913.419621] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] result = getattr(controller, method)(*args, **kwargs) [ 1913.419621] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1913.419621] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] return self._get(image_id) [ 1913.419621] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1913.419621] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1913.420071] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1913.420071] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] resp, body = self.http_client.get(url, headers=header) [ 1913.420071] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1913.420071] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] return self.request(url, 'GET', **kwargs) [ 1913.420071] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1913.420071] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] return self._handle_response(resp) [ 1913.420071] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1913.420071] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] raise exc.from_response(resp, resp.content) [ 1913.420071] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] nova.exception.ImageNotAuthorized: Not authorized for image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11. [ 1913.420071] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] [ 1913.420377] env[61649]: DEBUG nova.compute.utils [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Not authorized for image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11. 
{{(pid=61649) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1913.420570] env[61649]: DEBUG nova.compute.manager [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Build of instance ff225293-ad72-499a-9b5b-147d0bc40350 was re-scheduled: Not authorized for image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11. {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1913.421028] env[61649]: DEBUG nova.compute.manager [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Unplugging VIFs for instance {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1913.421201] env[61649]: DEBUG nova.compute.manager [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1913.421353] env[61649]: DEBUG nova.compute.manager [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1913.421507] env[61649]: DEBUG nova.network.neutron [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1913.488557] env[61649]: DEBUG nova.network.neutron [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Successfully created port: bf12cec8-a306-4683-b586-a02ac1f67c72 {{(pid=61649) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1913.517863] env[61649]: DEBUG neutronclient.v2_0.client [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61649) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1913.519099] env[61649]: ERROR nova.compute.manager [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. 
[ 1913.519099] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Traceback (most recent call last): [ 1913.519099] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1913.519099] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1913.519099] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1913.519099] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] result = getattr(controller, method)(*args, **kwargs) [ 1913.519099] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1913.519099] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] return self._get(image_id) [ 1913.519099] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1913.519099] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1913.519099] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1913.519435] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] resp, body = self.http_client.get(url, headers=header) [ 1913.519435] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1913.519435] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] return self.request(url, 'GET', **kwargs) [ 1913.519435] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1913.519435] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] return self._handle_response(resp) [ 1913.519435] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1913.519435] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] raise exc.from_response(resp, resp.content) [ 1913.519435] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1913.519435] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] [ 1913.519435] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] During handling of the above exception, another exception occurred: [ 1913.519435] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] [ 1913.519435] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Traceback (most recent call last): [ 1913.519869] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1913.519869] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] self.driver.spawn(context, instance, image_meta, [ 1913.519869] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1913.519869] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1913.519869] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1913.519869] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] self._fetch_image_if_missing(context, vi) [ 1913.519869] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1913.519869] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] image_fetch(context, vi, tmp_image_ds_loc) [ 1913.519869] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1913.519869] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] images.fetch_image( [ 1913.519869] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1913.519869] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] metadata = IMAGE_API.get(context, image_ref) [ 1913.519869] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1913.520326] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] return session.show(context, image_id, [ 1913.520326] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1913.520326] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] _reraise_translated_image_exception(image_id) [ 1913.520326] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1913.520326] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] raise new_exc.with_traceback(exc_trace) [ 1913.520326] env[61649]: ERROR nova.compute.manager [instance: 
ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1913.520326] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1913.520326] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1913.520326] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] result = getattr(controller, method)(*args, **kwargs) [ 1913.520326] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1913.520326] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] return self._get(image_id) [ 1913.520326] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1913.520326] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1913.520718] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1913.520718] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] resp, body = self.http_client.get(url, headers=header) [ 1913.520718] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1913.520718] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] return self.request(url, 'GET', **kwargs) [ 1913.520718] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1913.520718] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] return self._handle_response(resp) [ 1913.520718] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1913.520718] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] raise exc.from_response(resp, resp.content) [ 1913.520718] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] nova.exception.ImageNotAuthorized: Not authorized for image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11. 
[ 1913.520718] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] [ 1913.520718] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] During handling of the above exception, another exception occurred: [ 1913.520718] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] [ 1913.520718] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Traceback (most recent call last): [ 1913.521219] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/compute/manager.py", line 2447, in _do_build_and_run_instance [ 1913.521219] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] self._build_and_run_instance(context, instance, image, [ 1913.521219] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/compute/manager.py", line 2739, in _build_and_run_instance [ 1913.521219] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] raise exception.RescheduledException( [ 1913.521219] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] nova.exception.RescheduledException: Build of instance ff225293-ad72-499a-9b5b-147d0bc40350 was re-scheduled: Not authorized for image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11. [ 1913.521219] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] [ 1913.521219] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] During handling of the above exception, another exception occurred: [ 1913.521219] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] [ 1913.521219] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Traceback (most recent call last): [ 1913.521219] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1913.521219] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] ret = obj(*args, **kwargs) [ 1913.521219] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1913.521219] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] exception_handler_v20(status_code, error_body) [ 1913.521691] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1913.521691] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] raise client_exc(message=error_message, [ 1913.521691] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1913.521691] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Neutron server returns request_ids: ['req-420d4c0e-c9ac-4a20-86a1-fb4d957790a4'] [ 1913.521691] env[61649]: ERROR nova.compute.manager [instance: 
ff225293-ad72-499a-9b5b-147d0bc40350] [ 1913.521691] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] During handling of the above exception, another exception occurred: [ 1913.521691] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] [ 1913.521691] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Traceback (most recent call last): [ 1913.521691] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/compute/manager.py", line 3036, in _cleanup_allocated_networks [ 1913.521691] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] self._deallocate_network(context, instance, requested_networks) [ 1913.521691] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1913.521691] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] self.network_api.deallocate_for_instance( [ 1913.521691] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/network/neutron.py", line 1805, in deallocate_for_instance [ 1913.522197] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] data = neutron.list_ports(**search_opts) [ 1913.522197] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1913.522197] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] ret = obj(*args, **kwargs) [ 1913.522197] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1913.522197] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] return self.list('ports', self.ports_path, retrieve_all, [ 1913.522197] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1913.522197] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] ret = obj(*args, **kwargs) [ 1913.522197] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1913.522197] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] for r in self._pagination(collection, path, **params): [ 1913.522197] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1913.522197] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] res = self.get(path, params=params) [ 1913.522197] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1913.522197] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] ret = obj(*args, **kwargs) [ 1913.522593] env[61649]: ERROR nova.compute.manager [instance: 
ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1913.522593] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] return self.retry_request("GET", action, body=body, [ 1913.522593] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1913.522593] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] ret = obj(*args, **kwargs) [ 1913.522593] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1913.522593] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] return self.do_request(method, action, body=body, [ 1913.522593] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1913.522593] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] ret = obj(*args, **kwargs) [ 1913.522593] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1913.522593] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] self._handle_fault_response(status_code, replybody, resp) [ 1913.522593] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 1913.522593] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] raise exception.Unauthorized() [ 1913.522593] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] nova.exception.Unauthorized: Not authorized. 
[ 1913.522974] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] [ 1913.522974] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Expecting reply to msg f83f2304c4a5443d910a0c27e0743943 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1913.552567] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f83f2304c4a5443d910a0c27e0743943 [ 1913.575297] env[61649]: INFO nova.scheduler.client.report [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Deleted allocations for instance ff225293-ad72-499a-9b5b-147d0bc40350 [ 1913.580850] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Expecting reply to msg dce7dc9bcba448b7ba7aaccebb787239 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1913.604383] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dce7dc9bcba448b7ba7aaccebb787239 [ 1913.604860] env[61649]: DEBUG oslo_concurrency.lockutils [None req-122bcea3-1bc4-44a7-ac1d-854ee6b8848e tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Lock "ff225293-ad72-499a-9b5b-147d0bc40350" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 643.263s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1913.605387] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Expecting reply to msg d66a96dffbab45cd92be42cb5cf9f54b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1913.606531] env[61649]: DEBUG oslo_concurrency.lockutils [None req-51aaa883-591f-435a-9c24-7ccec09215c8 tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Lock "ff225293-ad72-499a-9b5b-147d0bc40350" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 447.001s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1913.606767] env[61649]: DEBUG oslo_concurrency.lockutils [None req-51aaa883-591f-435a-9c24-7ccec09215c8 tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Acquiring lock "ff225293-ad72-499a-9b5b-147d0bc40350-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1913.606948] env[61649]: DEBUG oslo_concurrency.lockutils [None req-51aaa883-591f-435a-9c24-7ccec09215c8 tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Lock "ff225293-ad72-499a-9b5b-147d0bc40350-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1913.607116] env[61649]: DEBUG 
oslo_concurrency.lockutils [None req-51aaa883-591f-435a-9c24-7ccec09215c8 tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Lock "ff225293-ad72-499a-9b5b-147d0bc40350-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1913.609043] env[61649]: INFO nova.compute.manager [None req-51aaa883-591f-435a-9c24-7ccec09215c8 tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Terminating instance [ 1913.610991] env[61649]: DEBUG nova.compute.manager [None req-51aaa883-591f-435a-9c24-7ccec09215c8 tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1913.611177] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-51aaa883-591f-435a-9c24-7ccec09215c8 tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1913.611638] env[61649]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6a6676f6-469d-4862-bf33-12590ad6c796 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.616525] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d66a96dffbab45cd92be42cb5cf9f54b [ 1913.616939] env[61649]: DEBUG nova.compute.manager [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1913.619017] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Expecting reply to msg b12ae753a6104adcaf2ea15ffdf266dc in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1913.623803] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3bdd366-5470-4f4f-88f5-9cb6291a177e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.650696] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b12ae753a6104adcaf2ea15ffdf266dc [ 1913.651183] env[61649]: WARNING nova.virt.vmwareapi.vmops [None req-51aaa883-591f-435a-9c24-7ccec09215c8 tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ff225293-ad72-499a-9b5b-147d0bc40350 could not be found.
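The lockutils records above show how nova serializes instance operations: build, terminate, and event-clearing each run under a lock named after the instance UUID, which is why do_terminate_instance reports having waited 447.001s behind the earlier build. A minimal sketch of that locking pattern, assuming the oslo.concurrency package is installed; the function body is a placeholder, not nova's terminate path.

from oslo_concurrency import lockutils

INSTANCE_UUID = "ff225293-ad72-499a-9b5b-147d0bc40350"


def do_terminate_instance():
    # lockutils.lock() is a context manager; a concurrent operation on the
    # same instance blocks here, which is where the waited/held timings in
    # the DEBUG records come from.
    with lockutils.lock(INSTANCE_UUID):
        print("terminating", INSTANCE_UUID)


do_terminate_instance()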
[ 1913.651368] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-51aaa883-591f-435a-9c24-7ccec09215c8 tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1913.651542] env[61649]: INFO nova.compute.manager [None req-51aaa883-591f-435a-9c24-7ccec09215c8 tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1913.651778] env[61649]: DEBUG oslo.service.loopingcall [None req-51aaa883-591f-435a-9c24-7ccec09215c8 tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1913.654021] env[61649]: DEBUG nova.compute.manager [-] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1913.654127] env[61649]: DEBUG nova.network.neutron [-] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 1913.669562] env[61649]: DEBUG oslo_concurrency.lockutils [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1913.669562] env[61649]: DEBUG oslo_concurrency.lockutils [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1913.670141] env[61649]: INFO nova.compute.claims [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1913.671281] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Expecting reply to msg 38b3ea2c5e9a4ce6aa52392157dc92c8 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1913.707219] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 38b3ea2c5e9a4ce6aa52392157dc92c8 [ 1913.709343] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Expecting reply to msg 935c37d8a6b24c65bf3d785e89392e01 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1913.716639] env[61649]: INFO
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 935c37d8a6b24c65bf3d785e89392e01 [ 1913.809164] env[61649]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61649) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1913.809420] env[61649]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1913.810001] env[61649]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.<locals>._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1913.810001] env[61649]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1913.810001] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1913.810001] env[61649]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1913.810001] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1913.810001] env[61649]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1913.810001] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1913.810001] env[61649]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1913.810001] env[61649]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1913.810001] env[61649]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-80c68037-d47b-4395-bb14-194ec7ee192f'] [ 1913.810001] env[61649]: ERROR oslo.service.loopingcall [ 1913.810001] env[61649]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1913.810001] env[61649]: ERROR oslo.service.loopingcall [ 1913.810001] env[61649]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1913.810001] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1913.810001] env[61649]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1913.810486] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1913.810486] env[61649]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1913.810486] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1913.810486] env[61649]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1913.810486] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1913.810486] env[61649]: ERROR oslo.service.loopingcall
self.network_api.deallocate_for_instance( [ 1913.810486] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1805, in deallocate_for_instance [ 1913.810486] env[61649]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1913.810486] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1913.810486] env[61649]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1913.810486] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1913.810486] env[61649]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1913.810486] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1913.810486] env[61649]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1913.810486] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1913.810486] env[61649]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1913.810486] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1913.810486] env[61649]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1913.811018] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1913.811018] env[61649]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1913.811018] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1913.811018] env[61649]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1913.811018] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1913.811018] env[61649]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1913.811018] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1913.811018] env[61649]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1913.811018] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1913.811018] env[61649]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1913.811018] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1913.811018] env[61649]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1913.811018] env[61649]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1913.811018] env[61649]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1913.811018] env[61649]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1913.811018] env[61649]: ERROR oslo.service.loopingcall [ 1913.811475] env[61649]: ERROR nova.compute.manager [None req-51aaa883-591f-435a-9c24-7ccec09215c8 tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1913.811988] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-51aaa883-591f-435a-9c24-7ccec09215c8 tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Expecting reply to msg e859f536b1964e57937ab4a4ae9585b8 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1913.839185] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e859f536b1964e57937ab4a4ae9585b8 [ 1913.840908] env[61649]: ERROR nova.compute.manager [None req-51aaa883-591f-435a-9c24-7ccec09215c8 tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1913.840908] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Traceback (most recent call last): [ 1913.840908] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1913.840908] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] ret = obj(*args, **kwargs) [ 1913.840908] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1913.840908] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] exception_handler_v20(status_code, error_body) [ 1913.840908] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1913.840908] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] raise client_exc(message=error_message, [ 1913.840908] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1913.840908] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Neutron server returns request_ids: ['req-80c68037-d47b-4395-bb14-194ec7ee192f'] [ 1913.840908] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] [ 1913.841303] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] During handling of the above exception, another exception occurred: [ 1913.841303] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] [ 1913.841303] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Traceback (most recent call last): 
[ 1913.841303] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 1913.841303] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] self._delete_instance(context, instance, bdms) [ 1913.841303] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1913.841303] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] self._shutdown_instance(context, instance, bdms) [ 1913.841303] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1913.841303] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] self._try_deallocate_network(context, instance, requested_networks) [ 1913.841303] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1913.841303] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] with excutils.save_and_reraise_exception(): [ 1913.841303] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1913.841303] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] self.force_reraise() [ 1913.841678] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1913.841678] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] raise self.value [ 1913.841678] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1913.841678] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] _deallocate_network_with_retries() [ 1913.841678] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1913.841678] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] return evt.wait() [ 1913.841678] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1913.841678] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] result = hub.switch() [ 1913.841678] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1913.841678] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] return self.greenlet.switch() [ 1913.841678] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 
1913.841678] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] result = func(*self.args, **self.kw) [ 1913.842145] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1913.842145] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] result = f(*args, **kwargs) [ 1913.842145] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1913.842145] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] self._deallocate_network( [ 1913.842145] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1913.842145] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] self.network_api.deallocate_for_instance( [ 1913.842145] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/network/neutron.py", line 1805, in deallocate_for_instance [ 1913.842145] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] data = neutron.list_ports(**search_opts) [ 1913.842145] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1913.842145] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] ret = obj(*args, **kwargs) [ 1913.842145] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1913.842145] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] return self.list('ports', self.ports_path, retrieve_all, [ 1913.842145] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1913.842537] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] ret = obj(*args, **kwargs) [ 1913.842537] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1913.842537] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] for r in self._pagination(collection, path, **params): [ 1913.842537] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1913.842537] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] res = self.get(path, params=params) [ 1913.842537] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1913.842537] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] ret = obj(*args, **kwargs) [ 1913.842537] env[61649]: ERROR nova.compute.manager [instance: 
ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1913.842537] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] return self.retry_request("GET", action, body=body, [ 1913.842537] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1913.842537] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] ret = obj(*args, **kwargs) [ 1913.842537] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1913.842537] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] return self.do_request(method, action, body=body, [ 1913.843003] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1913.843003] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] ret = obj(*args, **kwargs) [ 1913.843003] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1913.843003] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] self._handle_fault_response(status_code, replybody, resp) [ 1913.843003] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1913.843003] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1913.843003] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1913.843003] env[61649]: ERROR nova.compute.manager [instance: ff225293-ad72-499a-9b5b-147d0bc40350] [ 1913.843003] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-51aaa883-591f-435a-9c24-7ccec09215c8 tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Expecting reply to msg 90f230a478e6457dad4ce5a7e7bf32e5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1913.861849] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-978637f3-e196-42ec-90dc-e11b7c0a43cd {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.867490] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 90f230a478e6457dad4ce5a7e7bf32e5 [ 1913.870307] env[61649]: DEBUG oslo_concurrency.lockutils [None req-51aaa883-591f-435a-9c24-7ccec09215c8 tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Lock "ff225293-ad72-499a-9b5b-147d0bc40350" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.264s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1913.871344] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-51aaa883-591f-435a-9c24-7ccec09215c8 tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Expecting reply to msg 59362bc62f754c99b5f79884f5aa4117 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1913.871826] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "ff225293-ad72-499a-9b5b-147d0bc40350" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 175.852s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1913.872052] env[61649]: INFO nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] During sync_power_state the instance has a pending task (deleting). Skip.
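The "Dynamic interval looping call ... failed" record earlier comes from oslo.service's RetryDecorator, which _try_deallocate_network uses to drive _deallocate_network_with_retries: the decorated function is re-run from a DynamicLoopingCall until it returns or the retry budget is exhausted, at which point the last exception (here NeutronAdminCredentialConfigurationInvalid) propagates. A minimal sketch of that retry mechanism, assuming oslo.service is installed; TransientNetworkError and the retry counts are illustrative, not nova's values.

from oslo_service import loopingcall


class TransientNetworkError(Exception):
    pass


attempts = {"count": 0}


@loopingcall.RetryDecorator(max_retry_count=2, inc_sleep_time=1,
                            max_sleep_time=1,
                            exceptions=(TransientNetworkError,))
def deallocate_network_with_retries():
    # Fails twice, then succeeds; with max_retry_count=2 the decorator
    # permits two retries after the initial attempt.
    attempts["count"] += 1
    if attempts["count"] < 3:
        raise TransientNetworkError("try again")
    return "deallocated"


print(deallocate_network_with_retries())  # succeeds on the third attempt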
[ 1913.872475] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "ff225293-ad72-499a-9b5b-147d0bc40350" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1913.873171] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1b64dfd-a6e0-4477-80a5-dbe1138d0dc4 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.904966] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 59362bc62f754c99b5f79884f5aa4117 [ 1913.906164] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-51aaa883-591f-435a-9c24-7ccec09215c8 tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Expecting reply to msg 11d27fe9d57f4f73ac19f55a263bbd1f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1913.907488] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04d5eebb-4b80-46f6-a4e7-f0352e52f5a5 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.915756] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aea7200-3382-41a8-8a22-7effa3fa5a9d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.920297] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 11d27fe9d57f4f73ac19f55a263bbd1f [ 1913.922166] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-51aaa883-591f-435a-9c24-7ccec09215c8 tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Expecting reply to msg ea4fde2465644b63b37c8bb0ab80c343 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1913.930591] env[61649]: DEBUG nova.compute.provider_tree [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1913.931036] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Expecting reply to msg 50b2da8d3a48450e9ff9da0e609dfb3b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1913.937644] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 50b2da8d3a48450e9ff9da0e609dfb3b [ 1913.938460] env[61649]: DEBUG nova.scheduler.client.report [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400,
'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1913.940661] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Expecting reply to msg c5f37dd24e6d4ec68f85e76ac5095765 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1913.953834] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ea4fde2465644b63b37c8bb0ab80c343 [ 1913.954751] env[61649]: INFO nova.compute.manager [None req-51aaa883-591f-435a-9c24-7ccec09215c8 tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] [instance: ff225293-ad72-499a-9b5b-147d0bc40350] Successfully reverted task state from None on failure for instance. [ 1913.957381] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c5f37dd24e6d4ec68f85e76ac5095765 [ 1913.958032] env[61649]: DEBUG oslo_concurrency.lockutils [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.290s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1913.958465] env[61649]: DEBUG nova.compute.manager [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Start building networks asynchronously for instance. {{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1913.960307] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Expecting reply to msg 29b9ebe8f98445838b25f0461b5276e9 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1913.961931] env[61649]: ERROR oslo_messaging.rpc.server [None req-51aaa883-591f-435a-9c24-7ccec09215c8 tempest-ListImageFiltersTestJSON-588403986 tempest-ListImageFiltersTestJSON-588403986-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1913.961931] env[61649]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1913.961931] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1913.961931] env[61649]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1913.961931] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1913.961931] env[61649]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1913.961931] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1913.961931] env[61649]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1913.961931] env[61649]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1913.961931] env[61649]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-80c68037-d47b-4395-bb14-194ec7ee192f'] [ 1913.961931] env[61649]: ERROR oslo_messaging.rpc.server [ 1913.961931] env[61649]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1913.961931] env[61649]: ERROR oslo_messaging.rpc.server [ 1913.961931] env[61649]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1913.961931] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1913.962430] env[61649]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1913.962430] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1913.962430] env[61649]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1913.962430] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1913.962430] env[61649]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1913.962430] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1913.962430] env[61649]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1913.962430] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1913.962430] env[61649]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1913.962430] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1913.962430] env[61649]: ERROR oslo_messaging.rpc.server raise self.value [ 1913.962430] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1913.962430] env[61649]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1913.962430] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1913.962430] env[61649]: ERROR oslo_messaging.rpc.server with 
excutils.save_and_reraise_exception(): [ 1913.962430] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1913.962430] env[61649]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1913.962430] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1913.962989] env[61649]: ERROR oslo_messaging.rpc.server raise self.value [ 1913.962989] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1913.962989] env[61649]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1913.962989] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1913.962989] env[61649]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1913.962989] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1913.962989] env[61649]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1913.962989] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1913.962989] env[61649]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1913.962989] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1913.962989] env[61649]: ERROR oslo_messaging.rpc.server raise self.value [ 1913.962989] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1913.962989] env[61649]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1913.962989] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3344, in terminate_instance [ 1913.962989] env[61649]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1913.962989] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1913.962989] env[61649]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1913.962989] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3339, in do_terminate_instance [ 1913.963598] env[61649]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1913.963598] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1913.963598] env[61649]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1913.963598] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1913.963598] env[61649]: ERROR oslo_messaging.rpc.server raise self.value [ 1913.963598] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 1913.963598] env[61649]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1913.963598] env[61649]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1913.963598] env[61649]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1913.963598] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1913.963598] env[61649]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1913.963598] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1913.963598] env[61649]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1913.963598] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1913.963598] env[61649]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1913.963598] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1913.963598] env[61649]: ERROR oslo_messaging.rpc.server raise self.value [ 1913.963598] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1913.964155] env[61649]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1913.964155] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1913.964155] env[61649]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1913.964155] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1913.964155] env[61649]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1913.964155] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1913.964155] env[61649]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1913.964155] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1913.964155] env[61649]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1913.964155] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1913.964155] env[61649]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1913.964155] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1913.964155] env[61649]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1913.964155] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1913.964155] env[61649]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1913.964155] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1805, in deallocate_for_instance [ 1913.964155] env[61649]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1913.964155] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1913.964698] env[61649]: ERROR 
oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1913.964698] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1913.964698] env[61649]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1913.964698] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1913.964698] env[61649]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1913.964698] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1913.964698] env[61649]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1913.964698] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1913.964698] env[61649]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1913.964698] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1913.964698] env[61649]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1913.964698] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1913.964698] env[61649]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1913.964698] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1913.964698] env[61649]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1913.964698] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1913.964698] env[61649]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1913.964698] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1913.965304] env[61649]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1913.965304] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1913.965304] env[61649]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1913.965304] env[61649]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1913.965304] env[61649]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1913.965304] env[61649]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1913.965304] env[61649]: ERROR oslo_messaging.rpc.server [ 1913.992758] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 29b9ebe8f98445838b25f0461b5276e9 [ 1913.994247] env[61649]: DEBUG nova.compute.utils [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Using /dev/sd instead of None {{(pid=61649) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1913.994975] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Expecting reply to msg 3dd524d095574ded9b72840224eb8a36 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1913.996127] env[61649]: DEBUG nova.compute.manager [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Allocating IP information in the background. {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1913.996419] env[61649]: DEBUG nova.network.neutron [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] allocate_for_instance() {{(pid=61649) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1914.004038] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3dd524d095574ded9b72840224eb8a36 [ 1914.004650] env[61649]: DEBUG nova.compute.manager [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Start building block device mappings for instance. 
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1914.006394] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Expecting reply to msg f248fb5be67b43cea8eaf0a96943a5f8 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1914.041021] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f248fb5be67b43cea8eaf0a96943a5f8 [ 1914.044176] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Expecting reply to msg 79d45f4db3614d3aa13efc79c4ec7c52 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1914.048693] env[61649]: DEBUG nova.policy [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '47b5978abdfb4e288e317fa53fb7a54b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'be725bf46ff647018ed76001b586f633', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61649) authorize /opt/stack/nova/nova/policy.py:203}} [ 1914.073152] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 79d45f4db3614d3aa13efc79c4ec7c52 [ 1914.074726] env[61649]: DEBUG nova.compute.manager [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Start spawning the instance on the hypervisor. 
{{(pid=61649) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1914.097267] env[61649]: DEBUG nova.virt.hardware [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-04-02T10:03:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-04-02T10:03:42Z,direct_url=,disk_format='vmdk',id=d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d9c1bd4c77004c3cb8e42232cad1896c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-04-02T10:03:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1914.097756] env[61649]: DEBUG nova.virt.hardware [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Flavor limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1914.098063] env[61649]: DEBUG nova.virt.hardware [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Image limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1914.098484] env[61649]: DEBUG nova.virt.hardware [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Flavor pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1914.098770] env[61649]: DEBUG nova.virt.hardware [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Image pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1914.099040] env[61649]: DEBUG nova.virt.hardware [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1914.099403] env[61649]: DEBUG nova.virt.hardware [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1914.099716] env[61649]: DEBUG nova.virt.hardware [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61649) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1914.100048] env[61649]: DEBUG nova.virt.hardware [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Got 1 possible topologies {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1914.100358] env[61649]: DEBUG nova.virt.hardware [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1914.100667] env[61649]: DEBUG nova.virt.hardware [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1914.101644] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48df70ec-e4ff-4850-ae84-d2fdb5164629 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.112637] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df779fff-7d1c-42ff-a944-5ba350b82588 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.181801] env[61649]: DEBUG nova.network.neutron [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Successfully updated port: bf12cec8-a306-4683-b586-a02ac1f67c72 {{(pid=61649) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1914.183282] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg d2267306311e4dffa1ff3bf80c90dff8 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1914.192847] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d2267306311e4dffa1ff3bf80c90dff8 [ 1914.193648] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Acquiring lock "refresh_cache-f0e69971-df47-4ef0-85c9-ac686e4a4f9d" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1914.193912] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Acquired lock "refresh_cache-f0e69971-df47-4ef0-85c9-ac686e4a4f9d" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1914.194161] env[61649]: DEBUG nova.network.neutron [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Building network info cache for instance {{(pid=61649) 
[ 1914.194651] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg a091aaefe1fe401b8f47e7b1cda32b69 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1914.201407] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a091aaefe1fe401b8f47e7b1cda32b69
[ 1914.233857] env[61649]: DEBUG nova.network.neutron [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Instance cache missing network info. {{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}}
[ 1914.379364] env[61649]: DEBUG nova.network.neutron [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Updating instance_info_cache with network_info: [{"id": "bf12cec8-a306-4683-b586-a02ac1f67c72", "address": "fa:16:3e:20:5c:d0", "network": {"id": "7ccb9efc-b204-4b68-b0ee-59dd352de539", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-398085553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a732894bf424b5e9e3e972af47a7314", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db068f71-08cc-42d4-8ab6-17134c1585e5", "external-id": "nsx-vlan-transportzone-721", "segmentation_id": 721, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf12cec8-a3", "ovs_interfaceid": "bf12cec8-a306-4683-b586-a02ac1f67c72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1914.380466] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 46d6f9d8da8b4b1c97af7180b912ea4c in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1914.391377] env[61649]: DEBUG nova.network.neutron [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Successfully created port: 5969f2ee-4dbf-4f71-be1c-ebd6cffbe475 {{(pid=61649) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 1914.395020] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 46d6f9d8da8b4b1c97af7180b912ea4c
[ 1914.395694] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Releasing lock "refresh_cache-f0e69971-df47-4ef0-85c9-ac686e4a4f9d" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
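The instance_info_cache payload logged above is a JSON list with one dict per VIF. When reading these records it usually suffices to pull a few fields; a small sketch against a trimmed copy of the structure above (keys not shown here are simply dropped for brevity):

# `vif` mirrors (trimmed) one entry of the network_info list logged above.
vif = {
    "id": "bf12cec8-a306-4683-b586-a02ac1f67c72",
    "address": "fa:16:3e:20:5c:d0",
    "devname": "tapbf12cec8-a3",
    "network": {
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.3", "type": "fixed"}],
        }],
        "meta": {"mtu": 8950},
    },
}

# Collect the fixed IPs across all subnets of this VIF.
fixed_ips = [ip["address"]
             for subnet in vif["network"]["subnets"]
             for ip in subnet["ips"]
             if ip["type"] == "fixed"]
print(vif["id"], vif["devname"], fixed_ips, vif["network"]["meta"]["mtu"])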
lock "refresh_cache-f0e69971-df47-4ef0-85c9-ac686e4a4f9d" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1914.396410] env[61649]: DEBUG nova.compute.manager [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Instance network_info: |[{"id": "bf12cec8-a306-4683-b586-a02ac1f67c72", "address": "fa:16:3e:20:5c:d0", "network": {"id": "7ccb9efc-b204-4b68-b0ee-59dd352de539", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-398085553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a732894bf424b5e9e3e972af47a7314", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db068f71-08cc-42d4-8ab6-17134c1585e5", "external-id": "nsx-vlan-transportzone-721", "segmentation_id": 721, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf12cec8-a3", "ovs_interfaceid": "bf12cec8-a306-4683-b586-a02ac1f67c72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1914.397187] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:20:5c:d0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'db068f71-08cc-42d4-8ab6-17134c1585e5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bf12cec8-a306-4683-b586-a02ac1f67c72', 'vif_model': 'vmxnet3'}] {{(pid=61649) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1914.405444] env[61649]: DEBUG oslo.service.loopingcall [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1914.406007] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Creating VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1914.406363] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-91c291e7-bb6d-47d7-918a-c4ba1b5678aa {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.430832] env[61649]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1914.430832] env[61649]: value = "task-158289" [ 1914.430832] env[61649]: _type = "Task" [ 1914.430832] env[61649]: } to complete. 
{{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1914.439522] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158289, 'name': CreateVM_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1914.633989] env[61649]: DEBUG nova.compute.manager [req-846982ba-29fa-41fa-bbff-48977a155b9d req-13a98038-e07d-4fca-8186-9054b33b3bc6 service nova] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Received event network-vif-plugged-bf12cec8-a306-4683-b586-a02ac1f67c72 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1914.634408] env[61649]: DEBUG oslo_concurrency.lockutils [req-846982ba-29fa-41fa-bbff-48977a155b9d req-13a98038-e07d-4fca-8186-9054b33b3bc6 service nova] Acquiring lock "f0e69971-df47-4ef0-85c9-ac686e4a4f9d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1914.634916] env[61649]: DEBUG oslo_concurrency.lockutils [req-846982ba-29fa-41fa-bbff-48977a155b9d req-13a98038-e07d-4fca-8186-9054b33b3bc6 service nova] Lock "f0e69971-df47-4ef0-85c9-ac686e4a4f9d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1914.635207] env[61649]: DEBUG oslo_concurrency.lockutils [req-846982ba-29fa-41fa-bbff-48977a155b9d req-13a98038-e07d-4fca-8186-9054b33b3bc6 service nova] Lock "f0e69971-df47-4ef0-85c9-ac686e4a4f9d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1914.635505] env[61649]: DEBUG nova.compute.manager [req-846982ba-29fa-41fa-bbff-48977a155b9d req-13a98038-e07d-4fca-8186-9054b33b3bc6 service nova] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] No waiting events found dispatching network-vif-plugged-bf12cec8-a306-4683-b586-a02ac1f67c72 {{(pid=61649) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1914.635814] env[61649]: WARNING nova.compute.manager [req-846982ba-29fa-41fa-bbff-48977a155b9d req-13a98038-e07d-4fca-8186-9054b33b3bc6 service nova] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Received unexpected event network-vif-plugged-bf12cec8-a306-4683-b586-a02ac1f67c72 for instance with vm_state building and task_state spawning. [ 1914.636150] env[61649]: DEBUG nova.compute.manager [req-846982ba-29fa-41fa-bbff-48977a155b9d req-13a98038-e07d-4fca-8186-9054b33b3bc6 service nova] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Received event network-changed-bf12cec8-a306-4683-b586-a02ac1f67c72 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1914.636514] env[61649]: DEBUG nova.compute.manager [req-846982ba-29fa-41fa-bbff-48977a155b9d req-13a98038-e07d-4fca-8186-9054b33b3bc6 service nova] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Refreshing instance network info cache due to event network-changed-bf12cec8-a306-4683-b586-a02ac1f67c72. 
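task-158289 is submitted, polled at 0% and later reported completed. Those records come from oslo.vmware's wait_for_task/_poll_task loop; the sketch below shows only the general submit-then-poll shape, with fetch_task_state as a hypothetical stand-in for the vSphere TaskInfo query, not oslo.vmware's actual API.

import time

class TaskFailed(Exception):
    """Raised when the polled task reports an error state."""

def wait_for_task(fetch_task_state, task_ref, interval=0.5, timeout=300.0):
    """Poll until the task succeeds, fails, or times out.

    `fetch_task_state(task_ref)` is a hypothetical callable returning
    (state, progress, error); the real driver asks vSphere for TaskInfo.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress, error = fetch_task_state(task_ref)
        print(f"Task {task_ref}: {state}, progress is {progress}%")
        if state == "success":
            return
        if state == "error":
            raise TaskFailed(error)
        time.sleep(interval)  # matches the periodic _poll_task records
    raise TimeoutError(f"task {task_ref} did not complete within {timeout}s")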
[ 1914.636853] env[61649]: DEBUG oslo_concurrency.lockutils [req-846982ba-29fa-41fa-bbff-48977a155b9d req-13a98038-e07d-4fca-8186-9054b33b3bc6 service nova] Acquiring lock "refresh_cache-f0e69971-df47-4ef0-85c9-ac686e4a4f9d" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1914.637268] env[61649]: DEBUG oslo_concurrency.lockutils [req-846982ba-29fa-41fa-bbff-48977a155b9d req-13a98038-e07d-4fca-8186-9054b33b3bc6 service nova] Acquired lock "refresh_cache-f0e69971-df47-4ef0-85c9-ac686e4a4f9d" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1914.637716] env[61649]: DEBUG nova.network.neutron [req-846982ba-29fa-41fa-bbff-48977a155b9d req-13a98038-e07d-4fca-8186-9054b33b3bc6 service nova] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Refreshing network info cache for port bf12cec8-a306-4683-b586-a02ac1f67c72 {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}}
[ 1914.638346] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-846982ba-29fa-41fa-bbff-48977a155b9d req-13a98038-e07d-4fca-8186-9054b33b3bc6 service nova] Expecting reply to msg 0f3e526d02f144809f0ee1cbfff2b425 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1914.646338] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0f3e526d02f144809f0ee1cbfff2b425
[ 1914.897829] env[61649]: DEBUG nova.compute.manager [req-b208a215-c7de-466d-b81a-77e563787393 req-869cc819-b2b4-4bd5-84c0-b735e282d035 service nova] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Received event network-vif-plugged-5969f2ee-4dbf-4f71-be1c-ebd6cffbe475 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}}
[ 1914.898062] env[61649]: DEBUG oslo_concurrency.lockutils [req-b208a215-c7de-466d-b81a-77e563787393 req-869cc819-b2b4-4bd5-84c0-b735e282d035 service nova] Acquiring lock "4d429147-d3fe-4d99-af2a-e28a3829f434-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1914.898284] env[61649]: DEBUG oslo_concurrency.lockutils [req-b208a215-c7de-466d-b81a-77e563787393 req-869cc819-b2b4-4bd5-84c0-b735e282d035 service nova] Lock "4d429147-d3fe-4d99-af2a-e28a3829f434-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1914.898456] env[61649]: DEBUG oslo_concurrency.lockutils [req-b208a215-c7de-466d-b81a-77e563787393 req-869cc819-b2b4-4bd5-84c0-b735e282d035 service nova] Lock "4d429147-d3fe-4d99-af2a-e28a3829f434-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1914.898626] env[61649]: DEBUG nova.compute.manager [req-b208a215-c7de-466d-b81a-77e563787393 req-869cc819-b2b4-4bd5-84c0-b735e282d035 service nova] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] No waiting events found dispatching network-vif-plugged-5969f2ee-4dbf-4f71-be1c-ebd6cffbe475 {{(pid=61649) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 1914.898789] env[61649]: WARNING nova.compute.manager [req-b208a215-c7de-466d-b81a-77e563787393 req-869cc819-b2b4-4bd5-84c0-b735e282d035 service nova] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Received unexpected event network-vif-plugged-5969f2ee-4dbf-4f71-be1c-ebd6cffbe475 for instance with vm_state building and task_state spawning.
[ 1914.937990] env[61649]: DEBUG nova.network.neutron [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Successfully updated port: 5969f2ee-4dbf-4f71-be1c-ebd6cffbe475 {{(pid=61649) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 1914.938433] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Expecting reply to msg e3d8d8b0f44d4f29b6b3436eeee2b4a9 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1914.942384] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158289, 'name': CreateVM_Task, 'duration_secs': 0.291598} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1914.942759] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Created VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 1914.943424] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1914.948102] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1914.948502] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 1914.949086] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e3d8d8b0f44d4f29b6b3436eeee2b4a9
[ 1914.950086] env[61649]: DEBUG nova.network.neutron [req-846982ba-29fa-41fa-bbff-48977a155b9d req-13a98038-e07d-4fca-8186-9054b33b3bc6 service nova] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Updated VIF entry in instance network info cache for port bf12cec8-a306-4683-b586-a02ac1f67c72. {{(pid=61649) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}}
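The two WARNINGs above are a benign race: Neutron delivered network-vif-plugged before the spawning thread registered a waiter, so pop_instance_event found nothing to dispatch to. A sketch of the register-then-dispatch pattern behind those records (class and method names are hypothetical, not Nova's code):

import threading

class InstanceEvents:
    """Register-then-dispatch sketch: waiters are keyed by (instance, event)."""

    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}  # (instance_uuid, event_name) -> threading.Event

    def prepare_for(self, instance_uuid, event_name):
        """Called by the spawning thread *before* it plugs the VIF."""
        ev = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = ev
        return ev

    def dispatch(self, instance_uuid, event_name):
        """Called when the external event arrives from Neutron over RPC."""
        with self._lock:
            ev = self._waiters.pop((instance_uuid, event_name), None)
        if ev is None:
            # Event beat the waiter registration: log and drop, as above.
            print(f"Received unexpected event {event_name} for {instance_uuid}")
            return False
        ev.set()
        return True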
[ 1914.950394] env[61649]: DEBUG nova.network.neutron [req-846982ba-29fa-41fa-bbff-48977a155b9d req-13a98038-e07d-4fca-8186-9054b33b3bc6 service nova] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Updating instance_info_cache with network_info: [{"id": "bf12cec8-a306-4683-b586-a02ac1f67c72", "address": "fa:16:3e:20:5c:d0", "network": {"id": "7ccb9efc-b204-4b68-b0ee-59dd352de539", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-398085553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a732894bf424b5e9e3e972af47a7314", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db068f71-08cc-42d4-8ab6-17134c1585e5", "external-id": "nsx-vlan-transportzone-721", "segmentation_id": 721, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf12cec8-a3", "ovs_interfaceid": "bf12cec8-a306-4683-b586-a02ac1f67c72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1914.950909] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-846982ba-29fa-41fa-bbff-48977a155b9d req-13a98038-e07d-4fca-8186-9054b33b3bc6 service nova] Expecting reply to msg 0d2157b3188944579817bd7adc271149 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1914.951620] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bcf1449e-5078-4cec-b31f-6d17456e1eeb {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1914.953620] env[61649]: DEBUG oslo_concurrency.lockutils [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Acquiring lock "refresh_cache-4d429147-d3fe-4d99-af2a-e28a3829f434" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1914.953755] env[61649]: DEBUG oslo_concurrency.lockutils [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Acquired lock "refresh_cache-4d429147-d3fe-4d99-af2a-e28a3829f434" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1914.953894] env[61649]: DEBUG nova.network.neutron [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Building network info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}}
[ 1914.954233] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Expecting reply to msg a6942e49de8f4da5b440b14b8622ae51 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1914.965208] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0d2157b3188944579817bd7adc271149
[ 1914.965728] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a6942e49de8f4da5b440b14b8622ae51
[ 1914.966190] env[61649]: DEBUG oslo_vmware.api [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Waiting for the task: (returnval){
[ 1914.966190] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52175e77-1f58-1523-fa34-6803216ee13c"
[ 1914.966190] env[61649]: _type = "Task"
[ 1914.966190] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1914.966640] env[61649]: DEBUG oslo_concurrency.lockutils [req-846982ba-29fa-41fa-bbff-48977a155b9d req-13a98038-e07d-4fca-8186-9054b33b3bc6 service nova] Releasing lock "refresh_cache-f0e69971-df47-4ef0-85c9-ac686e4a4f9d" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1914.978111] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1914.978338] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Processing image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 1914.978544] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1915.207611] env[61649]: DEBUG nova.network.neutron [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Instance cache missing network info. {{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}}
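The image-cache records above serialize access to the datastore path with oslo.concurrency locks plus an external semaphore. A minimal usage sketch of the same primitive: lockutils.lock is the real oslo.concurrency context manager, while refresh_cached_image and its body are placeholders for illustration.

from oslo_concurrency import lockutils

CACHE_LOCK = ("[datastore1] devstack-image-cache_base/"
              "d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11")

def refresh_cached_image():
    # lockutils.lock() is the context manager that emits the
    # Acquiring/Acquired/Releasing records; external=True would add the
    # file-based inter-process lock behind the "external semaphore" lines.
    with lockutils.lock(CACHE_LOCK):
        pass  # check the datastore path, copy the base VMDK if missing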
[ 1915.374332] env[61649]: DEBUG nova.network.neutron [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Updating instance_info_cache with network_info: [{"id": "5969f2ee-4dbf-4f71-be1c-ebd6cffbe475", "address": "fa:16:3e:c1:53:ba", "network": {"id": "9c11ca0c-6674-4818-854b-4843c98e2edb", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1068587008-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be725bf46ff647018ed76001b586f633", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "683a619f-b10d-41a3-8c03-4f69f6c9ce53", "external-id": "nsx-vlan-transportzone-898", "segmentation_id": 898, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5969f2ee-4d", "ovs_interfaceid": "5969f2ee-4dbf-4f71-be1c-ebd6cffbe475", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1915.374844] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Expecting reply to msg 6f83b4e3a7a44a19bc142302844ac006 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1915.386663] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6f83b4e3a7a44a19bc142302844ac006
[ 1915.387204] env[61649]: DEBUG oslo_concurrency.lockutils [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Releasing lock "refresh_cache-4d429147-d3fe-4d99-af2a-e28a3829f434" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1915.387464] env[61649]: DEBUG nova.compute.manager [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Instance network_info: |[{"id": "5969f2ee-4dbf-4f71-be1c-ebd6cffbe475", "address": "fa:16:3e:c1:53:ba", "network": {"id": "9c11ca0c-6674-4818-854b-4843c98e2edb", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1068587008-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be725bf46ff647018ed76001b586f633", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "683a619f-b10d-41a3-8c03-4f69f6c9ce53", "external-id": "nsx-vlan-transportzone-898", "segmentation_id": 898, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5969f2ee-4d", "ovs_interfaceid": "5969f2ee-4dbf-4f71-be1c-ebd6cffbe475", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}}
[ 1915.387838] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c1:53:ba', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '683a619f-b10d-41a3-8c03-4f69f6c9ce53', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5969f2ee-4dbf-4f71-be1c-ebd6cffbe475', 'vif_model': 'vmxnet3'}] {{(pid=61649) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 1915.395249] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Creating folder: Project (be725bf46ff647018ed76001b586f633). Parent ref: group-v51588. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1915.395739] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9f337113-5152-4e11-b47d-3891d2159d2c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1915.405921] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Created folder: Project (be725bf46ff647018ed76001b586f633) in parent group-v51588.
[ 1915.406100] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Creating folder: Instances. Parent ref: group-v51696. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1915.406305] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8d5f5ced-6702-4d1f-8f49-57cff1b616c8 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1915.415351] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Created folder: Instances in parent group-v51696.
[ 1915.415568] env[61649]: DEBUG oslo.service.loopingcall [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1915.415769] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Creating VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 1915.416068] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-24b6eed6-777d-4004-b5d9-bd78916f05fe {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1915.434761] env[61649]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 1915.434761] env[61649]: value = "task-158292"
[ 1915.434761] env[61649]: _type = "Task"
[ 1915.434761] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1915.442144] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158292, 'name': CreateVM_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1915.945126] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158292, 'name': CreateVM_Task, 'duration_secs': 0.439615} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1915.945267] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Created VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 1915.945928] env[61649]: DEBUG oslo_concurrency.lockutils [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1915.946094] env[61649]: DEBUG oslo_concurrency.lockutils [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1915.946440] env[61649]: DEBUG oslo_concurrency.lockutils [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 1915.946673] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6f82062-f93f-4469-8f96-81c9d9ce0d7d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1915.950797] env[61649]: DEBUG oslo_vmware.api [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Waiting for the task: (returnval){
[ 1915.950797] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52a133f8-7dbf-0b98-d57d-4601676d81bf"
[ 1915.950797] env[61649]: _type = "Task"
[ 1915.950797] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1915.958302] env[61649]: DEBUG oslo_vmware.api [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52a133f8-7dbf-0b98-d57d-4601676d81bf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1916.462363] env[61649]: DEBUG oslo_concurrency.lockutils [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1916.462632] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Processing image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 1916.462850] env[61649]: DEBUG oslo_concurrency.lockutils [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1916.924955] env[61649]: DEBUG nova.compute.manager [req-061cd2f0-4c0b-4be5-b1f7-dfab53b9bed3 req-26460c98-b441-40e6-a28b-48a256339b55 service nova] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Received event network-changed-5969f2ee-4dbf-4f71-be1c-ebd6cffbe475 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}}
[ 1916.925168] env[61649]: DEBUG nova.compute.manager [req-061cd2f0-4c0b-4be5-b1f7-dfab53b9bed3 req-26460c98-b441-40e6-a28b-48a256339b55 service nova] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Refreshing instance network info cache due to event network-changed-5969f2ee-4dbf-4f71-be1c-ebd6cffbe475. {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}}
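The "Processing image" / _fetch_image_if_missing records decide between reusing the cached base VMDK located via SearchDatastore_Task and fetching it once under the lock taken above. The sketch below shows that cache-or-fetch shape on a plain filesystem, purely as an analogy: the real driver queries and copies on the datastore, and fetch_image_if_missing and download here are hypothetical helpers.

from pathlib import Path

def fetch_image_if_missing(cache_dir: Path, image_id: str, download) -> Path:
    """Reuse the cached base VMDK when present, else fetch it exactly once.

    `download` is a hypothetical callable that writes the image to a path.
    """
    cached = cache_dir / image_id / f"{image_id}.vmdk"
    if cached.exists():
        return cached  # cache hit: skip the download entirely
    tmp = cached.with_name("tmp-sparse.vmdk")
    tmp.parent.mkdir(parents=True, exist_ok=True)
    download(tmp)        # stream the image data
    tmp.rename(cached)   # promote into the cache
    return cached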
[ 1916.925354] env[61649]: DEBUG oslo_concurrency.lockutils [req-061cd2f0-4c0b-4be5-b1f7-dfab53b9bed3 req-26460c98-b441-40e6-a28b-48a256339b55 service nova] Acquiring lock "refresh_cache-4d429147-d3fe-4d99-af2a-e28a3829f434" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1916.925508] env[61649]: DEBUG oslo_concurrency.lockutils [req-061cd2f0-4c0b-4be5-b1f7-dfab53b9bed3 req-26460c98-b441-40e6-a28b-48a256339b55 service nova] Acquired lock "refresh_cache-4d429147-d3fe-4d99-af2a-e28a3829f434" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1916.925640] env[61649]: DEBUG nova.network.neutron [req-061cd2f0-4c0b-4be5-b1f7-dfab53b9bed3 req-26460c98-b441-40e6-a28b-48a256339b55 service nova] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Refreshing network info cache for port 5969f2ee-4dbf-4f71-be1c-ebd6cffbe475 {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}}
[ 1916.926154] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-061cd2f0-4c0b-4be5-b1f7-dfab53b9bed3 req-26460c98-b441-40e6-a28b-48a256339b55 service nova] Expecting reply to msg 72f2844b970a4af98131387afea8ea2e in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1916.933086] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 72f2844b970a4af98131387afea8ea2e
[ 1917.157641] env[61649]: DEBUG nova.network.neutron [req-061cd2f0-4c0b-4be5-b1f7-dfab53b9bed3 req-26460c98-b441-40e6-a28b-48a256339b55 service nova] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Updated VIF entry in instance network info cache for port 5969f2ee-4dbf-4f71-be1c-ebd6cffbe475. {{(pid=61649) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}}
[ 1917.158147] env[61649]: DEBUG nova.network.neutron [req-061cd2f0-4c0b-4be5-b1f7-dfab53b9bed3 req-26460c98-b441-40e6-a28b-48a256339b55 service nova] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Updating instance_info_cache with network_info: [{"id": "5969f2ee-4dbf-4f71-be1c-ebd6cffbe475", "address": "fa:16:3e:c1:53:ba", "network": {"id": "9c11ca0c-6674-4818-854b-4843c98e2edb", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1068587008-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be725bf46ff647018ed76001b586f633", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "683a619f-b10d-41a3-8c03-4f69f6c9ce53", "external-id": "nsx-vlan-transportzone-898", "segmentation_id": 898, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5969f2ee-4d", "ovs_interfaceid": "5969f2ee-4dbf-4f71-be1c-ebd6cffbe475", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1917.158961] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-061cd2f0-4c0b-4be5-b1f7-dfab53b9bed3 req-26460c98-b441-40e6-a28b-48a256339b55 service nova] Expecting reply to msg e8a56f57428e4e258d8605b2aedc6cb3 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1917.168161] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e8a56f57428e4e258d8605b2aedc6cb3
[ 1917.168804] env[61649]: DEBUG oslo_concurrency.lockutils [req-061cd2f0-4c0b-4be5-b1f7-dfab53b9bed3 req-26460c98-b441-40e6-a28b-48a256339b55 service nova] Releasing lock "refresh_cache-4d429147-d3fe-4d99-af2a-e28a3829f434" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1917.929004] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1917.929471] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61649) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}}
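The "Running periodic task ComputeManager._*" records here and below are emitted by oslo.service's periodic_task machinery as it cycles through decorated methods. A minimal usage sketch; ComputePeriodics and its method bodies are invented for illustration, while the decorator and base class are, to the best of my understanding, the real oslo.service API:

from oslo_config import cfg
from oslo_service import periodic_task

class ComputePeriodics(periodic_task.PeriodicTasks):
    """Invented holder class; the decorator/base class are oslo.service's."""

    @periodic_task.periodic_task(spacing=60)
    def _poll_volume_usage(self, context):
        pass  # the manager's real work runs here

    @periodic_task.periodic_task(spacing=60)
    def _heal_instance_info_cache(self, context):
        pass

periodics = ComputePeriodics(cfg.CONF)
# Each call logs 'Running periodic task ...' for every task that is due.
periodics.run_periodic_tasks(context=None)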
[ 1918.929470] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1918.929851] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1921.924024] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1923.928634] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1923.928973] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1925.930028] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1925.930316] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Starting heal instance info cache {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}}
[ 1925.930316] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Rebuilding the list of instances to heal {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}}
[ 1925.930914] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg b98bc77c30a34260989e4fae176f5402 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1925.948135] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b98bc77c30a34260989e4fae176f5402
[ 1925.950330] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}}
[ 1925.950473] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}}
[ 1925.950605] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}}
[ 1925.950732] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}}
[ 1925.950854] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}}
[ 1925.950973] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}}
[ 1925.951091] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}}
[ 1925.951208] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}}
[ 1925.951333] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}}
[ 1925.951442] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}}
[ 1925.951557] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Didn't find any instances for network info cache update. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}}
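Every instance in the rebuilt heal list above is still Building, so each one is skipped and the pass ends with "Didn't find any instances for network info cache update." A sketch of that selection logic (simplified; the field names are hypothetical):

BUILDING = "building"

def pick_instance_to_heal(instances):
    """Skip instances whose network info is still being assembled."""
    for inst in instances:
        if inst["vm_state"] == BUILDING:
            print(f"[instance: {inst['uuid']}] Skipping network cache "
                  "update for instance because it is Building.")
            continue
        return inst  # first stable instance gets its cache refreshed
    print("Didn't find any instances for network info cache update.")
    return None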
[ 1926.929269] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1926.929707] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg b842d59fe7b54ab290a4246c8c96d80f in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1926.938879] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b842d59fe7b54ab290a4246c8c96d80f
[ 1926.939863] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1926.940077] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1926.940246] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1926.940401] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61649) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 1926.941463] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc10d33c-bac5-4e18-8a90-709c8062d2b8 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1926.949827] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-757e86fe-641a-433a-8a5d-841ba960873e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1926.963239] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1295fc62-c657-4421-8a3a-a36fa9528829 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1926.969147] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2e8160a-34e6-48e9-9eba-496987fd159b {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1926.999066] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181773MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61649) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 1926.999337] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1926.999674] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1927.000535] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 78518a17212d4fb18f8daf9681b3ff23 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1927.033792] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 78518a17212d4fb18f8daf9681b3ff23
[ 1927.037844] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg deb7acbc3bb64280824f797a3a78d865 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1927.046207] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg deb7acbc3bb64280824f797a3a78d865
[ 1927.063276] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 5f424618-f9b3-4e9a-898c-2d1a07476cc7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1927.063580] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 4661732c-51dc-4a77-aa32-28049dbd5ad7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1927.063852] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 545947a4-3f1a-44fe-ac02-ec5e2e5844d5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1927.064115] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1927.064415] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 28a3b287-8717-42d5-989a-4f66642134f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1927.064693] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 4e47e82d-780e-4c23-8071-083beab2a53f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1927.064963] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 8b1cd843-48b0-4e85-93fa-32ddd6e32883 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1927.065207] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance d8503feb-d1df-4e1f-8357-e080e8bdb174 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1927.065445] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance f0e69971-df47-4ef0-85c9-ac686e4a4f9d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1927.065697] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 4d429147-d3fe-4d99-af2a-e28a3829f434 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1927.065995] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 1927.066274] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 1927.186107] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d32394d7-263e-4da9-8a54-7ffa6b4ac1de {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1927.193230] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ea6013d-1fe2-459c-b536-ac6420d0f3a8 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1927.221582] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5781e5b3-3493-4bcc-9819-3ddcbf018d0e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1927.228136] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49714694-1ef1-46a0-b0f4-86dd5a99f2f0 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1927.240270] env[61649]: DEBUG nova.compute.provider_tree [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1927.240763] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 4f6e68d585674fbcb45fbad9e1bf843f in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1927.247357] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4f6e68d585674fbcb45fbad9e1bf843f
[ 1927.248205] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1927.250400] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 518c21113ef3429db650f0c6f2dadf38 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1927.266676] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 518c21113ef3429db650f0c6f2dadf38
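Both the ProviderTree check and the scheduler report client above conclude "Inventory has not changed", so no update needs to be pushed to Placement. Since the inventory is a dict keyed by resource class with plain numeric fields, the comparison can be sketched as dict equality; this is a simplification of the real code, offered only to make the records legible:

def inventory_changed(cached: dict, fresh: dict) -> bool:
    """Plain dict comparison: both sides carry the same per-resource-class
    keys (total, reserved, min_unit, max_unit, step_size, allocation_ratio)."""
    return cached != fresh

cached = {"VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
                   "step_size": 1, "allocation_ratio": 4.0}}
fresh = {"VCPU": dict(cached["VCPU"])}
assert not inventory_changed(cached, fresh)  # no PUT to Placement needed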
[ 1927.267365] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61649) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 1927.267549] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.268s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1928.267808] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1952.305518] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7874eed7-c264-4dfe-ba18-15b5a0a7b0d6 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg d130a10c360e4e34b19e2042da74e4ba in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1952.314644] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d130a10c360e4e34b19e2042da74e4ba
[ 1952.315078] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7874eed7-c264-4dfe-ba18-15b5a0a7b0d6 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquiring lock "d8503feb-d1df-4e1f-8357-e080e8bdb174" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1961.082887] env[61649]: WARNING oslo_vmware.rw_handles [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 1961.082887] env[61649]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 1961.082887] env[61649]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 1961.082887] env[61649]: ERROR oslo_vmware.rw_handles self._conn.getresponse()
[ 1961.082887] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 1961.082887] env[61649]: ERROR oslo_vmware.rw_handles response.begin()
[ 1961.082887] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 1961.082887] env[61649]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status()
[ 1961.082887] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 1961.082887] env[61649]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without"
[ 1961.082887] env[61649]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 1961.082887] env[61649]: ERROR oslo_vmware.rw_handles
[ 1961.083601] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Downloaded image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to vmware_temp/0f59bf6b-70c4-4aff-83f5-d58d4ecf2d2f/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 1961.085155] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Caching image {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 1961.085413] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Copying Virtual Disk [datastore1] vmware_temp/0f59bf6b-70c4-4aff-83f5-d58d4ecf2d2f/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk to [datastore1] vmware_temp/0f59bf6b-70c4-4aff-83f5-d58d4ecf2d2f/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk {{(pid=61649) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 1961.086062] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-403e93a7-acb9-4534-a260-ac1c9b8fffb6 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1961.094862] env[61649]: DEBUG oslo_vmware.api [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Waiting for the task: (returnval){
[ 1961.094862] env[61649]: value = "task-158293"
[ 1961.094862] env[61649]: _type = "Task"
[ 1961.094862] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1961.105643] env[61649]: DEBUG oslo_vmware.api [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Task: {'id': task-158293, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1961.605288] env[61649]: DEBUG oslo_vmware.exceptions [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Fault InvalidArgument not matched. {{(pid=61649) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
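The wait_for_task/_poll_task records above show the driver handing CopyVirtualDisk_Task to vCenter and then polling until it settles. A simplified sketch of that polling loop, assuming a `get_task_info` callable that stands in for reading the Task object; this is not oslo.vmware's implementation:

```python
import time

class TaskFailed(Exception):
    pass

def wait_for_task(get_task_info, interval=0.5, timeout=300):
    """Poll a vCenter-style task until it leaves the running states.

    `get_task_info` is a hypothetical stand-in for a PropertyCollector
    read of the Task object; it should return an object with .state,
    .progress and .error attributes, mirroring what the records poll.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info.state == 'success':
            return info
        if info.state == 'error':
            # oslo.vmware translates the fault at this point (see the
            # InvalidArgument fault below); here we simply raise.
            raise TaskFailed(info.error)
        # 'queued' / 'running': report progress and keep polling.
        print(f"task progress is {info.progress or 0}%")
        time.sleep(interval)
    raise TimeoutError('task did not complete in time')
```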
[ 1961.605564] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1961.606111] env[61649]: ERROR nova.compute.manager [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1961.606111] env[61649]: Faults: ['InvalidArgument']
[ 1961.606111] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Traceback (most recent call last):
[ 1961.606111] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources
[ 1961.606111] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] yield resources
[ 1961.606111] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1961.606111] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] self.driver.spawn(context, instance, image_meta,
[ 1961.606111] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1961.606111] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1961.606111] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1961.606111] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] self._fetch_image_if_missing(context, vi)
[ 1961.606111] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1961.606521] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] image_cache(vi, tmp_image_ds_loc)
[ 1961.606521] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1961.606521] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] vm_util.copy_virtual_disk(
[ 1961.606521] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1961.606521] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] session._wait_for_task(vmdk_copy_task)
[ 1961.606521] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1961.606521] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] return self.wait_for_task(task_ref)
[ 1961.606521] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1961.606521] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] return evt.wait()
[ 1961.606521] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1961.606521] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] result = hub.switch()
[ 1961.606521] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1961.606521] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] return self.greenlet.switch()
[ 1961.606930] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1961.606930] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] self.f(*self.args, **self.kw)
[ 1961.606930] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1961.606930] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] raise exceptions.translate_fault(task_info.error)
[ 1961.606930] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1961.606930] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Faults: ['InvalidArgument']
[ 1961.606930] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7]
[ 1961.606930] env[61649]: INFO nova.compute.manager [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Terminating instance
[ 1961.608030] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1961.608242] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1961.608483] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9ad3063d-a073-45d1-8c11-2450caf6d0fb {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
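In the VimFaultException traceback above, "Fault InvalidArgument not matched" means oslo.vmware found no specific exception class registered for the fault name and fell back to the generic VimFaultException. A toy version of that translation step (the registry and class names here are illustrative, not oslo.vmware's real table):

```python
# Illustrative fault-translation registry: map a fault name to a
# specific exception class, else fall back to the generic one, which
# is what the "not matched" record above reports.
class VimFaultException(Exception):
    def __init__(self, fault_list, message):
        super().__init__(message)
        self.fault_list = fault_list

class FileNotFoundException(VimFaultException):
    pass

_FAULT_CLASSES = {'FileNotFound': FileNotFoundException}

def translate_fault(fault_name: str, message: str) -> VimFaultException:
    """Return a specific exception for a known fault, generic otherwise."""
    cls = _FAULT_CLASSES.get(fault_name, VimFaultException)
    return cls([fault_name], message)

exc = translate_fault('InvalidArgument',
                      'A specified parameter was not correct: fileType')
print(type(exc).__name__, exc.fault_list)  # VimFaultException ['InvalidArgument']
```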
[ 1961.610626] env[61649]: DEBUG nova.compute.manager [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 1961.610810] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1961.611509] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e9837d5-f0cf-433b-ba81-c152bd9b3d09 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1961.618781] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Unregistering the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 1961.618781] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-22cde0c1-c7b4-4d4a-84b4-d268d4a08451 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1961.620767] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1961.620942] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61649) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 1961.621912] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be3bed50-1472-48ff-934b-daa51eb2acf7 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1961.626550] env[61649]: DEBUG oslo_vmware.api [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Waiting for the task: (returnval){
[ 1961.626550] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]527b04ea-8199-47b0-2b9c-3517df80a6ae"
[ 1961.626550] env[61649]: _type = "Task"
[ 1961.626550] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1961.633651] env[61649]: DEBUG oslo_vmware.api [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]527b04ea-8199-47b0-2b9c-3517df80a6ae, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1961.684121] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Unregistered the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 1961.684359] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Deleting contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 1961.684542] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Deleting the datastore file [datastore1] 5f424618-f9b3-4e9a-898c-2d1a07476cc7 {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1961.684814] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1039ccfc-bab6-4cbc-b624-a0fb5ffffb1c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1961.690733] env[61649]: DEBUG oslo_vmware.api [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Waiting for the task: (returnval){
[ 1961.690733] env[61649]: value = "task-158295"
[ 1961.690733] env[61649]: _type = "Task"
[ 1961.690733] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1961.697938] env[61649]: DEBUG oslo_vmware.api [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Task: {'id': task-158295, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
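The destroy sequence above is strictly ordered: unregister the VM from vCenter's inventory first, then delete its files, waiting on each returned task. A schematic with a made-up `VCenter` stand-in (this is not oslo.vmware's session API; only the ordering and the task-wait mirror the log):

```python
# Hypothetical session layer; method names are illustrative.
class VCenter:
    def unregister_vm(self, vm_ref):
        """UnregisterVM: drop the VM from inventory, keep its files."""
        print(f"UnregisterVM({vm_ref})")

    def delete_datastore_file(self, ds_path):
        """DeleteDatastoreFile_Task: returns a task to wait on."""
        print(f"DeleteDatastoreFile_Task({ds_path})")
        return "task-158295"  # the log's task id, for flavour

    def wait_for_task(self, task):
        print(f"{task} completed successfully")

def destroy_instance(vc: VCenter, vm_ref: str, ds_path: str):
    vc.unregister_vm(vm_ref)                  # 1. remove from inventory
    task = vc.delete_datastore_file(ds_path)  # 2. delete files on disk
    vc.wait_for_task(task)                    # 3. block until done

destroy_instance(VCenter(), "vm-123",
                 "[datastore1] 5f424618-f9b3-4e9a-898c-2d1a07476cc7")
```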
[ 1962.136529] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Preparing fetch location {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 1962.136889] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Creating directory with path [datastore1] vmware_temp/92331601-cc6d-4825-8bb6-f0afa5af22de/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1962.137023] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2591ae6b-9d89-446a-9992-67fa634b3502 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1962.147979] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Created directory with path [datastore1] vmware_temp/92331601-cc6d-4825-8bb6-f0afa5af22de/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1962.148209] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Fetch image to [datastore1] vmware_temp/92331601-cc6d-4825-8bb6-f0afa5af22de/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 1962.148380] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to [datastore1] vmware_temp/92331601-cc6d-4825-8bb6-f0afa5af22de/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 1962.149120] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb9ead10-813b-4897-9099-988df8c81f2a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1962.155464] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f0eee07-7901-4a8d-acdc-3dc941dab235 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1962.166226] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0036172-4382-4c37-8eda-3e396b7e8d44 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1962.202969] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd230d13-17f2-4ee9-be85-dc50dd3f8aa9 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1962.210068] env[61649]: DEBUG oslo_vmware.api [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Task: {'id': task-158295, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078008} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1962.211462] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Deleted the datastore file {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1962.211655] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Deleted contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}}
[ 1962.211828] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1962.212012] env[61649]: INFO nova.compute.manager [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Took 0.60 seconds to destroy the instance on the hypervisor.
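The "Preparing fetch location" records above repeat the cache-miss path for a second instance: create a per-request vmware_temp directory, fetch the image there, then promote it into devstack-image-cache_base. A minimal sketch of that fetch-if-missing pattern, assuming a hypothetical `download` callable (Nova's real version works against datastore paths, not the local filesystem):

```python
import os
import shutil
import tempfile
import threading
import uuid

_image_locks: dict[str, threading.Lock] = {}

def fetch_image_if_missing(image_id: str, cache_dir: str, download) -> str:
    """Toy cache flow: download to a unique vmware_temp-style location,
    then move the result into the shared image cache.

    `download` is a hypothetical callable(image_id, dest_path).
    """
    cached = os.path.join(cache_dir, image_id, f"{image_id}.vmdk")
    lock = _image_locks.setdefault(image_id, threading.Lock())
    with lock:                      # one fetch per image at a time
        if os.path.exists(cached):
            return cached           # cache hit: nothing to do
        tmp_dir = os.path.join(
            tempfile.gettempdir(), f"vmware_temp-{uuid.uuid4()}")
        os.makedirs(tmp_dir)
        tmp = os.path.join(tmp_dir, "tmp-sparse.vmdk")
        download(image_id, tmp)    # stream the bits (e.g. from Glance)
        os.makedirs(os.path.dirname(cached), exist_ok=True)
        shutil.move(tmp, cached)   # publish into the cache
        return cached
```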
[ 1962.213745] env[61649]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-a31e1b6b-5d6b-4729-9e2b-276268f75c4f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1962.215563] env[61649]: DEBUG nova.compute.claims [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Aborting claim: {{(pid=61649) abort /opt/stack/nova/nova/compute/claims.py:84}}
[ 1962.215737] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1962.215950] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1962.217791] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 52ba549b3f42403bb9d91590050d2627 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1962.237497] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 1962.248357] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 52ba549b3f42403bb9d91590050d2627
[ 1962.285873] env[61649]: DEBUG oslo_vmware.rw_handles [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/92331601-cc6d-4825-8bb6-f0afa5af22de/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 1962.346274] env[61649]: DEBUG oslo_vmware.rw_handles [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Completed reading data from the image iterator. {{(pid=61649) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 1962.346461] env[61649]: DEBUG oslo_vmware.rw_handles [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/92331601-cc6d-4825-8bb6-f0afa5af22de/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 1962.414868] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfb17c99-569b-4695-b003-03bc6465f228 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1962.422401] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74fa8ad4-527f-441d-9ef7-58bbf5faa329 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1962.451086] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98c5f8e8-fdf1-4dd9-a165-d754acda9909 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1962.457635] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-740e63a6-6f85-4e45-8794-47073da84d3f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1962.470893] env[61649]: DEBUG nova.compute.provider_tree [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1962.471358] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg edc685f336584a8aa2028f1081578f37 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1962.479300] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg edc685f336584a8aa2028f1081578f37
[ 1962.480221] env[61649]: DEBUG nova.scheduler.client.report [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1962.482380] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg d0b13a97f77c405382c70342d1ced2cc in queue reply_17c3d98394d943e0a538ced2a50ef815
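The rw_handles records above show the actual upload: an HTTP connection is opened against the datastore's /folder URL with a known Content-Length, fed from the image iterator, then closed (the earlier RemoteDisconnected surfaced at exactly that close step). A rough sketch of such a write handle, assuming a pre-acquired session ticket; the names are illustrative and TLS verification is disabled only because this is a lab-style example:

```python
import http.client
import ssl
import urllib.parse

def upload_to_datastore(url: str, src_path: str, size: int, ticket: str):
    """Stream a local file to a datastore /folder URL with a fixed
    Content-Length, chunk by chunk. `ticket` stands in for the session
    cookie/service ticket vCenter grants for the transfer.
    """
    parts = urllib.parse.urlsplit(url)
    conn = http.client.HTTPSConnection(
        parts.hostname, parts.port or 443,
        context=ssl._create_unverified_context())  # lab setup only
    path = parts.path + ('?' + parts.query if parts.query else '')
    conn.putrequest('PUT', path)
    conn.putheader('Content-Length', str(size))
    conn.putheader('Cookie', ticket)
    conn.endheaders()
    with open(src_path, 'rb') as src:
        while chunk := src.read(64 * 1024):
            conn.send(chunk)
    resp = conn.getresponse()   # this is the step that raised
    resp.read()                 # RemoteDisconnected earlier in the log
    conn.close()
    return resp.status
```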
[ 1962.492781] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d0b13a97f77c405382c70342d1ced2cc
[ 1962.493440] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.277s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1962.493949] env[61649]: ERROR nova.compute.manager [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1962.493949] env[61649]: Faults: ['InvalidArgument']
[ 1962.493949] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Traceback (most recent call last):
[ 1962.493949] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1962.493949] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] self.driver.spawn(context, instance, image_meta,
[ 1962.493949] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1962.493949] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1962.493949] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1962.493949] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] self._fetch_image_if_missing(context, vi)
[ 1962.493949] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1962.493949] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] image_cache(vi, tmp_image_ds_loc)
[ 1962.493949] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1962.494323] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] vm_util.copy_virtual_disk(
[ 1962.494323] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1962.494323] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] session._wait_for_task(vmdk_copy_task)
[ 1962.494323] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1962.494323] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] return self.wait_for_task(task_ref)
[ 1962.494323] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1962.494323] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] return evt.wait()
[ 1962.494323] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1962.494323] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] result = hub.switch()
[ 1962.494323] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1962.494323] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] return self.greenlet.switch()
[ 1962.494323] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1962.494323] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] self.f(*self.args, **self.kw)
[ 1962.494685] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1962.494685] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] raise exceptions.translate_fault(task_info.error)
[ 1962.494685] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1962.494685] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Faults: ['InvalidArgument']
[ 1962.494685] env[61649]: ERROR nova.compute.manager [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7]
[ 1962.494685] env[61649]: DEBUG nova.compute.utils [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] VimFaultException {{(pid=61649) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1962.495946] env[61649]: DEBUG nova.compute.manager [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Build of instance 5f424618-f9b3-4e9a-898c-2d1a07476cc7 was re-scheduled: A specified parameter was not correct: fileType
[ 1962.495946] env[61649]: Faults: ['InvalidArgument'] {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 1962.496336] env[61649]: DEBUG nova.compute.manager [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Unplugging VIFs for instance {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 1962.496509] env[61649]: DEBUG nova.compute.manager [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}}
[ 1962.496678] env[61649]: DEBUG nova.compute.manager [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 1962.496838] env[61649]: DEBUG nova.network.neutron [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}}
[ 1962.820689] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 4cd7689f90fd49e58a490e74752964d1 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1962.829397] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4cd7689f90fd49e58a490e74752964d1
[ 1962.829651] env[61649]: DEBUG nova.network.neutron [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1962.830146] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 994f5f7106714b93bbb56074e8003b20 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1962.842044] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 994f5f7106714b93bbb56074e8003b20
[ 1962.842676] env[61649]: INFO nova.compute.manager [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Took 0.35 seconds to deallocate network for instance.
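The records above show the recovery path end to end: abort the claim, deallocate networking, and mark the build re-scheduled so another host can try. Schematically (the exception and callable names are stand-ins, not Nova's exact API):

```python
# Illustrative retry path for a failed build; the real flow re-raises
# through the conductor, which picks a different host.
class RescheduledException(Exception):
    pass

def build_and_run(instance, spawn, cleanup_networks, abort_claim):
    try:
        spawn(instance)
    except Exception as exc:
        abort_claim(instance)       # free the host's claimed resources
        cleanup_networks(instance)  # deallocate ports, as logged above
        raise RescheduledException(
            f"Build of instance {instance} was re-scheduled: {exc}") from exc
```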
[ 1962.844328] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 4847a5a982c04e34b2fa63447c017ee8 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1962.884379] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4847a5a982c04e34b2fa63447c017ee8
[ 1962.886979] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 07e211b4bf724791b26fdf71ddc90c87 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1962.915945] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 07e211b4bf724791b26fdf71ddc90c87
[ 1962.938366] env[61649]: INFO nova.scheduler.client.report [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Deleted allocations for instance 5f424618-f9b3-4e9a-898c-2d1a07476cc7
[ 1962.944430] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 06babc5755614b5b9ce28aba2ab01976 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1962.955346] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 06babc5755614b5b9ce28aba2ab01976
[ 1962.955843] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9e65f57c-30cd-4243-b9a2-52b8be761232 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "5f424618-f9b3-4e9a-898c-2d1a07476cc7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 600.252s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1962.956082] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c0cf039f-9dd4-4997-b64d-62cd285e50fd tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "5f424618-f9b3-4e9a-898c-2d1a07476cc7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 404.266s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1962.956295] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c0cf039f-9dd4-4997-b64d-62cd285e50fd tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquiring lock "5f424618-f9b3-4e9a-898c-2d1a07476cc7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1962.956493] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c0cf039f-9dd4-4997-b64d-62cd285e50fd tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "5f424618-f9b3-4e9a-898c-2d1a07476cc7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1962.956708] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c0cf039f-9dd4-4997-b64d-62cd285e50fd tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "5f424618-f9b3-4e9a-898c-2d1a07476cc7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1962.958997] env[61649]: INFO nova.compute.manager [None req-c0cf039f-9dd4-4997-b64d-62cd285e50fd tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Terminating instance
[ 1962.960849] env[61649]: DEBUG nova.compute.manager [None req-c0cf039f-9dd4-4997-b64d-62cd285e50fd tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 1962.961041] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-c0cf039f-9dd4-4997-b64d-62cd285e50fd tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1962.961496] env[61649]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4e429704-a727-403d-a098-352dd3521413 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1962.970703] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b00598bb-869b-4c21-b869-a857e637fb60 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1962.998826] env[61649]: WARNING nova.virt.vmwareapi.vmops [None req-c0cf039f-9dd4-4997-b64d-62cd285e50fd tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5f424618-f9b3-4e9a-898c-2d1a07476cc7 could not be found.
[ 1962.999024] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-c0cf039f-9dd4-4997-b64d-62cd285e50fd tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1962.999196] env[61649]: INFO nova.compute.manager [None req-c0cf039f-9dd4-4997-b64d-62cd285e50fd tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Took 0.04 seconds to destroy the instance on the hypervisor.
[ 1962.999439] env[61649]: DEBUG oslo.service.loopingcall [None req-c0cf039f-9dd4-4997-b64d-62cd285e50fd tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
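The looping-call record above shows network deallocation wrapped in a retry helper. A plain-loop equivalent of that pattern, assuming a `deallocate` callable (Nova drives this through an oslo.service looping call rather than a bare loop):

```python
import time

def deallocate_network_with_retries(deallocate, attempts=3, base_delay=1.0):
    """Call `deallocate()` until it succeeds, backing off between tries.

    A toy stand-in for the retry wrapper the record above waits on.
    """
    for attempt in range(1, attempts + 1):
        try:
            return deallocate()
        except Exception:
            if attempt == attempts:
                raise                                        # out of retries
            time.sleep(base_delay * 2 ** (attempt - 1))      # 1s, 2s, 4s...
```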
[ 1962.999711] env[61649]: DEBUG nova.compute.manager [-] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 1962.999809] env[61649]: DEBUG nova.network.neutron [-] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}}
[ 1963.017563] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg bedcf1e229784af098ee74f6620f2e1b in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1963.023491] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bedcf1e229784af098ee74f6620f2e1b
[ 1963.023849] env[61649]: DEBUG nova.network.neutron [-] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1963.024263] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 99ad23f26db54367821ad88e76eb148e in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1963.031773] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 99ad23f26db54367821ad88e76eb148e
[ 1963.032237] env[61649]: INFO nova.compute.manager [-] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] Took 0.03 seconds to deallocate network for instance.
[ 1963.035562] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c0cf039f-9dd4-4997-b64d-62cd285e50fd tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg ca2a00a8621c4e70bd9344538b5d5eb5 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1963.060928] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ca2a00a8621c4e70bd9344538b5d5eb5
[ 1963.075097] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c0cf039f-9dd4-4997-b64d-62cd285e50fd tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg c009abc8be414a4abecf61fcd6305b06 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1963.112805] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c009abc8be414a4abecf61fcd6305b06
[ 1963.115859] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c0cf039f-9dd4-4997-b64d-62cd285e50fd tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "5f424618-f9b3-4e9a-898c-2d1a07476cc7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.160s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1963.116291] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c0cf039f-9dd4-4997-b64d-62cd285e50fd tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 330e3f1f3efd4c1ba79aa3b9433fab02 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1963.117039] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "5f424618-f9b3-4e9a-898c-2d1a07476cc7" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 225.097s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1963.117291] env[61649]: INFO nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 5f424618-f9b3-4e9a-898c-2d1a07476cc7] During sync_power_state the instance has a pending task (deleting). Skip.
[ 1963.117552] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "5f424618-f9b3-4e9a-898c-2d1a07476cc7" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1963.125435] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 330e3f1f3efd4c1ba79aa3b9433fab02
[ 1970.988457] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 939e74446eb94d02a0257010eed796a9 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1970.997881] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 939e74446eb94d02a0257010eed796a9
[ 1973.296506] env[61649]: DEBUG oslo_concurrency.lockutils [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Acquiring lock "8295f484-2065-4a21-bdec-7d38e98f93e7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1973.296786] env[61649]: DEBUG oslo_concurrency.lockutils [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Lock "8295f484-2065-4a21-bdec-7d38e98f93e7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1973.297274] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg b075add0eeb84306b1b667af30877733 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1973.313288] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b075add0eeb84306b1b667af30877733
[ 1973.313721] env[61649]: DEBUG nova.compute.manager [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
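The lock records above carry "waited"/"held" timings for every acquire and release. A small sketch of producing the same bookkeeping around oslo.concurrency's real lockutils.lock (this assumes oslo.concurrency is installed; the wrapper itself is illustrative):

```python
import time
from oslo_concurrency import lockutils  # real API; pip install oslo.concurrency

def with_timed_lock(name, fn):
    """Run `fn` under a named lock, logging how long we waited to
    acquire it and how long we held it, like the records above.
    """
    t0 = time.monotonic()
    with lockutils.lock(name):
        waited = time.monotonic() - t0
        t1 = time.monotonic()
        try:
            return fn()
        finally:
            held = time.monotonic() - t1
            print(f'Lock "{name}" :: waited {waited:.3f}s :: held {held:.3f}s')

with_timed_lock("compute_resources", lambda: time.sleep(0.1))
```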
[ 1973.315445] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg 7313565a373444fd8a4f79cf77c79ac7 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1973.345054] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7313565a373444fd8a4f79cf77c79ac7
[ 1973.361293] env[61649]: DEBUG oslo_concurrency.lockutils [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1973.361531] env[61649]: DEBUG oslo_concurrency.lockutils [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1973.363168] env[61649]: INFO nova.compute.claims [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 1973.364884] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg e2baa58c6bb84608966ade972ae3c600 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1973.396554] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e2baa58c6bb84608966ade972ae3c600
[ 1973.398354] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg c0dc99da46ba40e9bbbb5555b8805fa6 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1973.406544] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c0dc99da46ba40e9bbbb5555b8805fa6
[ 1973.522664] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-025ff0ea-ade8-4979-adf8-347c4ef35606 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1973.530196] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c3e9457-2dec-4f7a-96e0-77fda8b0b528 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1973.559178] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8d7ae4b-8560-4cf8-8a64-7d36c135ae48 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1973.566092] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c0b8425-521b-40a5-892c-1c5175a5b02a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1973.578354] env[61649]: DEBUG nova.compute.provider_tree [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1973.578818] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg 94f4f82d34ca419b9fdc10d9906c5f39 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1973.586189] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 94f4f82d34ca419b9fdc10d9906c5f39
[ 1973.586981] env[61649]: DEBUG nova.scheduler.client.report [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1973.589356] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg 9c302285b1ae4ae990b859b12ca967e6 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1973.599596] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9c302285b1ae4ae990b859b12ca967e6
[ 1973.600305] env[61649]: DEBUG oslo_concurrency.lockutils [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.239s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1973.600771] env[61649]: DEBUG nova.compute.manager [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Start building networks asynchronously for instance. {{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}}
[ 1973.602493] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg 4168d8ecf05842138fa350267e4b6baa in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1973.631953] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4168d8ecf05842138fa350267e4b6baa
[ 1973.633387] env[61649]: DEBUG nova.compute.utils [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Using /dev/sd instead of None {{(pid=61649) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1973.633934] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg 1b46c1257ab74c3db258d321ae295c4e in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1973.634658] env[61649]: DEBUG nova.compute.manager [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Allocating IP information in the background. {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}}
[ 1973.634833] env[61649]: DEBUG nova.network.neutron [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] allocate_for_instance() {{(pid=61649) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 1973.642347] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1b46c1257ab74c3db258d321ae295c4e
[ 1973.642781] env[61649]: DEBUG nova.compute.manager [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Start building block device mappings for instance.
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1973.644421] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg 24d9e27630134fc99916ad7348e25d64 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1973.673851] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 24d9e27630134fc99916ad7348e25d64 [ 1973.677108] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg 6a748a7dd896416b8b7a90b1222a1fe5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1973.679156] env[61649]: DEBUG nova.policy [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e0551dadfa2643d18f591f7c00dab53e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2e0bd6f2d26e442f92498e358016a346', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61649) authorize /opt/stack/nova/nova/policy.py:203}} [ 1973.708400] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6a748a7dd896416b8b7a90b1222a1fe5 [ 1973.709482] env[61649]: DEBUG nova.compute.manager [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Start spawning the instance on the hypervisor. 
{{(pid=61649) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1973.730372] env[61649]: DEBUG nova.virt.hardware [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-04-02T10:03:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-04-02T10:03:42Z,direct_url=,disk_format='vmdk',id=d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d9c1bd4c77004c3cb8e42232cad1896c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-04-02T10:03:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1973.730653] env[61649]: DEBUG nova.virt.hardware [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Flavor limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1973.730858] env[61649]: DEBUG nova.virt.hardware [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Image limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1973.731076] env[61649]: DEBUG nova.virt.hardware [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Flavor pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1973.731309] env[61649]: DEBUG nova.virt.hardware [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Image pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1973.731384] env[61649]: DEBUG nova.virt.hardware [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1973.731643] env[61649]: DEBUG nova.virt.hardware [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1973.731833] env[61649]: DEBUG nova.virt.hardware [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1973.732073] env[61649]: DEBUG 
nova.virt.hardware [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Got 1 possible topologies {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1973.732280] env[61649]: DEBUG nova.virt.hardware [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1973.732482] env[61649]: DEBUG nova.virt.hardware [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1973.733339] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f01cb3a5-6ebc-4a4c-8b6a-61ecfb882ab0 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.740857] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e80733f7-5094-45cb-b1c1-9800f6e1a099 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.027916] env[61649]: DEBUG nova.network.neutron [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Successfully created port: 6fb2e8b6-7b17-4d07-893c-11e47e7ee68b {{(pid=61649) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1974.587025] env[61649]: DEBUG nova.compute.manager [req-50496a18-27c5-4573-a644-05b91e42af29 req-00f850ec-9b50-4247-81f8-5435d6f58175 service nova] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Received event network-vif-plugged-6fb2e8b6-7b17-4d07-893c-11e47e7ee68b {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1974.587402] env[61649]: DEBUG oslo_concurrency.lockutils [req-50496a18-27c5-4573-a644-05b91e42af29 req-00f850ec-9b50-4247-81f8-5435d6f58175 service nova] Acquiring lock "8295f484-2065-4a21-bdec-7d38e98f93e7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1974.587450] env[61649]: DEBUG oslo_concurrency.lockutils [req-50496a18-27c5-4573-a644-05b91e42af29 req-00f850ec-9b50-4247-81f8-5435d6f58175 service nova] Lock "8295f484-2065-4a21-bdec-7d38e98f93e7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1974.587622] env[61649]: DEBUG oslo_concurrency.lockutils [req-50496a18-27c5-4573-a644-05b91e42af29 req-00f850ec-9b50-4247-81f8-5435d6f58175 service nova] Lock "8295f484-2065-4a21-bdec-7d38e98f93e7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1974.587792] env[61649]: DEBUG 
nova.compute.manager [req-50496a18-27c5-4573-a644-05b91e42af29 req-00f850ec-9b50-4247-81f8-5435d6f58175 service nova] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] No waiting events found dispatching network-vif-plugged-6fb2e8b6-7b17-4d07-893c-11e47e7ee68b {{(pid=61649) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1974.587950] env[61649]: WARNING nova.compute.manager [req-50496a18-27c5-4573-a644-05b91e42af29 req-00f850ec-9b50-4247-81f8-5435d6f58175 service nova] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Received unexpected event network-vif-plugged-6fb2e8b6-7b17-4d07-893c-11e47e7ee68b for instance with vm_state building and task_state spawning. [ 1974.606085] env[61649]: DEBUG nova.network.neutron [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Successfully updated port: 6fb2e8b6-7b17-4d07-893c-11e47e7ee68b {{(pid=61649) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1974.606551] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg 07f1ac9f72e0406eb7044bbb686f753f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1974.613830] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 07f1ac9f72e0406eb7044bbb686f753f [ 1974.614480] env[61649]: DEBUG oslo_concurrency.lockutils [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Acquiring lock "refresh_cache-8295f484-2065-4a21-bdec-7d38e98f93e7" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1974.614609] env[61649]: DEBUG oslo_concurrency.lockutils [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Acquired lock "refresh_cache-8295f484-2065-4a21-bdec-7d38e98f93e7" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1974.614749] env[61649]: DEBUG nova.network.neutron [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Building network info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 1974.615128] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg 02da65ddb349452f9696e8c904d018e8 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1974.621904] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 02da65ddb349452f9696e8c904d018e8 [ 1974.656934] env[61649]: DEBUG nova.network.neutron [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Instance cache missing network info. 
{{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 1974.796956] env[61649]: DEBUG nova.network.neutron [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Updating instance_info_cache with network_info: [{"id": "6fb2e8b6-7b17-4d07-893c-11e47e7ee68b", "address": "fa:16:3e:71:26:4a", "network": {"id": "a42f700e-7bbf-46d9-a33a-c4d37e928c52", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1858676203-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2e0bd6f2d26e442f92498e358016a346", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6fb2e8b6-7b", "ovs_interfaceid": "6fb2e8b6-7b17-4d07-893c-11e47e7ee68b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1974.797465] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg 45ecdc7dfe944a529824fff556799cfe in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1974.809852] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 45ecdc7dfe944a529824fff556799cfe [ 1974.810398] env[61649]: DEBUG oslo_concurrency.lockutils [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Releasing lock "refresh_cache-8295f484-2065-4a21-bdec-7d38e98f93e7" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1974.810669] env[61649]: DEBUG nova.compute.manager [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Instance network_info: |[{"id": "6fb2e8b6-7b17-4d07-893c-11e47e7ee68b", "address": "fa:16:3e:71:26:4a", "network": {"id": "a42f700e-7bbf-46d9-a33a-c4d37e928c52", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1858676203-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2e0bd6f2d26e442f92498e358016a346", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6fb2e8b6-7b", "ovs_interfaceid": "6fb2e8b6-7b17-4d07-893c-11e47e7ee68b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1974.811046] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:71:26:4a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ba866c99-1cb2-4588-9f76-4bc0421ed46a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6fb2e8b6-7b17-4d07-893c-11e47e7ee68b', 'vif_model': 'vmxnet3'}] {{(pid=61649) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1974.818396] env[61649]: DEBUG oslo.service.loopingcall [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1974.818822] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Creating VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1974.819032] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fd443e8e-6617-42ad-aa36-523308ece5ac {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.838948] env[61649]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1974.838948] env[61649]: value = "task-158296" [ 1974.838948] env[61649]: _type = "Task" [ 1974.838948] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1974.846175] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158296, 'name': CreateVM_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1975.349427] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158296, 'name': CreateVM_Task, 'duration_secs': 0.320438} completed successfully. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1975.349672] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Created VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1975.350366] env[61649]: DEBUG oslo_concurrency.lockutils [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1975.350485] env[61649]: DEBUG oslo_concurrency.lockutils [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1975.350822] env[61649]: DEBUG oslo_concurrency.lockutils [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1975.351057] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-014ec652-5885-4fe0-a671-c0ce9ece8863 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.355268] env[61649]: DEBUG oslo_vmware.api [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Waiting for the task: (returnval){ [ 1975.355268] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]527dc4a7-78f9-38c1-613c-1c89f970bb9a" [ 1975.355268] env[61649]: _type = "Task" [ 1975.355268] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1975.362495] env[61649]: DEBUG oslo_vmware.api [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]527dc4a7-78f9-38c1-613c-1c89f970bb9a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1975.865409] env[61649]: DEBUG oslo_concurrency.lockutils [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1975.866113] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Processing image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1975.866498] env[61649]: DEBUG oslo_concurrency.lockutils [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1976.612290] env[61649]: DEBUG nova.compute.manager [req-47376417-8f52-4c74-ad63-184f4a41a159 req-747a0aa0-cd04-4e9d-875d-1014925d61f0 service nova] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Received event network-changed-6fb2e8b6-7b17-4d07-893c-11e47e7ee68b {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 1976.612743] env[61649]: DEBUG nova.compute.manager [req-47376417-8f52-4c74-ad63-184f4a41a159 req-747a0aa0-cd04-4e9d-875d-1014925d61f0 service nova] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Refreshing instance network info cache due to event network-changed-6fb2e8b6-7b17-4d07-893c-11e47e7ee68b. 
{{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 1976.613088] env[61649]: DEBUG oslo_concurrency.lockutils [req-47376417-8f52-4c74-ad63-184f4a41a159 req-747a0aa0-cd04-4e9d-875d-1014925d61f0 service nova] Acquiring lock "refresh_cache-8295f484-2065-4a21-bdec-7d38e98f93e7" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1976.613354] env[61649]: DEBUG oslo_concurrency.lockutils [req-47376417-8f52-4c74-ad63-184f4a41a159 req-747a0aa0-cd04-4e9d-875d-1014925d61f0 service nova] Acquired lock "refresh_cache-8295f484-2065-4a21-bdec-7d38e98f93e7" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1976.613633] env[61649]: DEBUG nova.network.neutron [req-47376417-8f52-4c74-ad63-184f4a41a159 req-747a0aa0-cd04-4e9d-875d-1014925d61f0 service nova] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Refreshing network info cache for port 6fb2e8b6-7b17-4d07-893c-11e47e7ee68b {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 1976.614206] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-47376417-8f52-4c74-ad63-184f4a41a159 req-747a0aa0-cd04-4e9d-875d-1014925d61f0 service nova] Expecting reply to msg fd804e93ef394e7699af9054c2d289aa in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1976.621089] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fd804e93ef394e7699af9054c2d289aa [ 1976.858631] env[61649]: DEBUG nova.network.neutron [req-47376417-8f52-4c74-ad63-184f4a41a159 req-747a0aa0-cd04-4e9d-875d-1014925d61f0 service nova] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Updated VIF entry in instance network info cache for port 6fb2e8b6-7b17-4d07-893c-11e47e7ee68b. 
{{(pid=61649) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 1976.858988] env[61649]: DEBUG nova.network.neutron [req-47376417-8f52-4c74-ad63-184f4a41a159 req-747a0aa0-cd04-4e9d-875d-1014925d61f0 service nova] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Updating instance_info_cache with network_info: [{"id": "6fb2e8b6-7b17-4d07-893c-11e47e7ee68b", "address": "fa:16:3e:71:26:4a", "network": {"id": "a42f700e-7bbf-46d9-a33a-c4d37e928c52", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1858676203-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2e0bd6f2d26e442f92498e358016a346", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6fb2e8b6-7b", "ovs_interfaceid": "6fb2e8b6-7b17-4d07-893c-11e47e7ee68b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1976.859494] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-47376417-8f52-4c74-ad63-184f4a41a159 req-747a0aa0-cd04-4e9d-875d-1014925d61f0 service nova] Expecting reply to msg 064db3fac55d4f95aba40784ae18e44b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1976.867224] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 064db3fac55d4f95aba40784ae18e44b [ 1976.867780] env[61649]: DEBUG oslo_concurrency.lockutils [req-47376417-8f52-4c74-ad63-184f4a41a159 req-747a0aa0-cd04-4e9d-875d-1014925d61f0 service nova] Releasing lock "refresh_cache-8295f484-2065-4a21-bdec-7d38e98f93e7" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1977.928990] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1977.929305] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61649) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 1978.929448] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1979.930177] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1983.925640] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1983.928300] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1984.928495] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1986.929782] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1986.930173] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 401dcd552f344d3199ded205b434ea11 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1986.939152] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 401dcd552f344d3199ded205b434ea11 [ 1986.940172] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1986.940383] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1986.940552] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1986.940713] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61649) update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1986.941805] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-459c4f15-93c1-471f-b152-af0e79ee08d8 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.950433] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bb40c84-a9b8-4a2c-80e6-377b94a876de {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.964656] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47babea0-1622-4c7e-b7f2-099375b8db42 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.970775] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-025b9b12-1615-4da8-8918-a31c10f5e8ab {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.998207] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181800MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61649) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1986.998349] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1986.998533] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1986.999320] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 0ae9c31a2fad457282e89e184b34db98 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1987.032062] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0ae9c31a2fad457282e89e184b34db98 [ 1987.036171] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 25395a30717e465aaa64eeb6eeb97528 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1987.046342] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 25395a30717e465aaa64eeb6eeb97528 [ 1987.064024] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 4661732c-51dc-4a77-aa32-28049dbd5ad7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1987.064024] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 545947a4-3f1a-44fe-ac02-ec5e2e5844d5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1987.064024] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1987.064024] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 28a3b287-8717-42d5-989a-4f66642134f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1987.064253] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 4e47e82d-780e-4c23-8071-083beab2a53f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1987.064253] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 8b1cd843-48b0-4e85-93fa-32ddd6e32883 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1987.064253] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance d8503feb-d1df-4e1f-8357-e080e8bdb174 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1987.064253] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance f0e69971-df47-4ef0-85c9-ac686e4a4f9d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1987.064401] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 4d429147-d3fe-4d99-af2a-e28a3829f434 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1987.064401] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 8295f484-2065-4a21-bdec-7d38e98f93e7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1987.064401] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1987.064401] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1987.190227] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee50aa2a-5b4e-434b-8dd3-0d6e950b0651 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.198586] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54b790a0-2c4f-457b-87c7-71161e7f141d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.227172] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9894c801-adbf-4daf-8c9c-717000543571 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.233587] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c53b9cf6-8b4b-4166-bd8d-10ed84625cdf {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.245687] env[61649]: DEBUG nova.compute.provider_tree [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1987.246134] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg fa13476380f84ceba1162669605589fe in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1987.253164] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fa13476380f84ceba1162669605589fe [ 1987.254004] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1987.256191] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg bef6966608634b28a83bbaacad9c6b00 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1987.269019] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bef6966608634b28a83bbaacad9c6b00 [ 1987.269668] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61649) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1987.269857] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.271s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1988.270026] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1988.270700] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Starting heal instance info cache {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 1988.270994] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Rebuilding the list of instances to heal {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 1988.271693] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 697475ab905d49a481c97c7ea184ecad in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 1988.288929] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 697475ab905d49a481c97c7ea184ecad [ 1988.291232] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1988.291515] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1988.291763] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1988.292030] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Skipping network cache update for instance because it is Building. 
{{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1988.292283] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1988.292523] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1988.292768] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1988.292998] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1988.293224] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1988.293448] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 1988.293686] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Didn't find any instances for network info cache update. 
{{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}}
[ 1989.929146] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1994.925196] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1994.925196] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 31571a4ae22445dd8fcb43af8e42469b in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 1994.942041] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 31571a4ae22445dd8fcb43af8e42469b
[ 2010.375102] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-b019443b-e4eb-4c5b-a467-281cc76e9a13 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 080ae819ea174043a7ad5d75d9d642d9 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2010.383909] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 080ae819ea174043a7ad5d75d9d642d9
[ 2010.384367] env[61649]: DEBUG oslo_concurrency.lockutils [None req-b019443b-e4eb-4c5b-a467-281cc76e9a13 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Acquiring lock "f0e69971-df47-4ef0-85c9-ac686e4a4f9d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2011.098427] env[61649]: WARNING oslo_vmware.rw_handles [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 2011.098427] env[61649]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 2011.098427] env[61649]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 2011.098427] env[61649]: ERROR oslo_vmware.rw_handles self._conn.getresponse()
[ 2011.098427] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 2011.098427] env[61649]: ERROR oslo_vmware.rw_handles response.begin()
[ 2011.098427] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 2011.098427] env[61649]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status()
[ 2011.098427] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 2011.098427] env[61649]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without"
[ 2011.098427] env[61649]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 2011.098427] env[61649]: ERROR oslo_vmware.rw_handles
[ 2011.099329] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Downloaded image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to vmware_temp/92331601-cc6d-4825-8bb6-f0afa5af22de/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 2011.101025] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Caching image {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 2011.101277] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Copying Virtual Disk [datastore1] vmware_temp/92331601-cc6d-4825-8bb6-f0afa5af22de/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk to [datastore1] vmware_temp/92331601-cc6d-4825-8bb6-f0afa5af22de/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk {{(pid=61649) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 2011.101558] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1f313db6-e4ec-4472-94e0-f603ec4f1619 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2011.109086] env[61649]: DEBUG oslo_vmware.api [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Waiting for the task: (returnval){
[ 2011.109086] env[61649]: value = "task-158297"
[ 2011.109086] env[61649]: _type = "Task"
[ 2011.109086] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2011.116583] env[61649]: DEBUG oslo_vmware.api [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Task: {'id': task-158297, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2011.620650] env[61649]: DEBUG oslo_vmware.exceptions [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Fault InvalidArgument not matched. {{(pid=61649) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 2011.620951] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2011.621513] env[61649]: ERROR nova.compute.manager [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2011.621513] env[61649]: Faults: ['InvalidArgument']
[ 2011.621513] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Traceback (most recent call last):
[ 2011.621513] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources
[ 2011.621513] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] yield resources
[ 2011.621513] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 2011.621513] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] self.driver.spawn(context, instance, image_meta,
[ 2011.621513] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 2011.621513] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 2011.621513] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 2011.621513] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] self._fetch_image_if_missing(context, vi)
[ 2011.621513] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 2011.621985] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] image_cache(vi, tmp_image_ds_loc)
[ 2011.621985] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 2011.621985] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] vm_util.copy_virtual_disk(
[ 2011.621985] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 2011.621985] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] session._wait_for_task(vmdk_copy_task)
[ 2011.621985] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 2011.621985] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] return self.wait_for_task(task_ref)
[ 2011.621985] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 2011.621985] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] return evt.wait()
[ 2011.621985] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 2011.621985] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] result = hub.switch()
[ 2011.621985] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 2011.621985] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] return self.greenlet.switch()
[ 2011.622481] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 2011.622481] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] self.f(*self.args, **self.kw)
[ 2011.622481] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 2011.622481] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] raise exceptions.translate_fault(task_info.error)
[ 2011.622481] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2011.622481] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Faults: ['InvalidArgument']
[ 2011.622481] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7]
[ 2011.622481] env[61649]: INFO nova.compute.manager [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Terminating instance
[ 2011.624537] env[61649]: DEBUG nova.compute.manager [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Start destroying the instance on the hypervisor.
{{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2011.624739] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2011.625026] env[61649]: DEBUG oslo_concurrency.lockutils [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2011.625216] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2011.625909] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-335b373f-63ec-4703-b9eb-e2650901cf60 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.628446] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b85b6e08-d88f-4721-9be6-150f07dcf49c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.634032] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Unregistering the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2011.634227] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cc6024cc-d4b0-440e-8e55-f171e278adf1 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.636269] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2011.636425] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61649) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2011.637315] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f58bcad-a1b4-4b67-849f-326fe22f0032 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.643047] env[61649]: DEBUG oslo_vmware.api [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Waiting for the task: (returnval){ [ 2011.643047] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]5207ddac-e854-2f0d-4c32-1bd713b8064e" [ 2011.643047] env[61649]: _type = "Task" [ 2011.643047] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2011.649946] env[61649]: DEBUG oslo_vmware.api [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]5207ddac-e854-2f0d-4c32-1bd713b8064e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2011.699593] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Unregistered the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2011.699846] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Deleting contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2011.700055] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Deleting the datastore file [datastore1] 4661732c-51dc-4a77-aa32-28049dbd5ad7 {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2011.700331] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8b291012-78d2-4e97-a827-2ec6fa4773d6 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.705995] env[61649]: DEBUG oslo_vmware.api [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Waiting for the task: (returnval){ [ 2011.705995] env[61649]: value = "task-158299" [ 2011.705995] env[61649]: _type = "Task" [ 2011.705995] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2011.713571] env[61649]: DEBUG oslo_vmware.api [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Task: {'id': task-158299, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2012.153191] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Preparing fetch location {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2012.153405] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Creating directory with path [datastore1] vmware_temp/d5222410-eaf5-4066-99e3-3d9371112206/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2012.153644] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-513469a4-1e12-4472-829c-088cf0b3ffce {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.165506] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Created directory with path [datastore1] vmware_temp/d5222410-eaf5-4066-99e3-3d9371112206/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2012.165803] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Fetch image to [datastore1] vmware_temp/d5222410-eaf5-4066-99e3-3d9371112206/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2012.166084] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to [datastore1] vmware_temp/d5222410-eaf5-4066-99e3-3d9371112206/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2012.166930] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-032ee57d-98db-43d5-9a32-caf5c25e7695 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.173454] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f84396d-541c-4ac3-a20d-9f38089abd81 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.182209] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7749aab7-e019-4b98-9a81-185795656064 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.215596] env[61649]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a9a7e37-445e-4476-acc3-ac486161d411 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.222166] env[61649]: DEBUG oslo_vmware.api [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Task: {'id': task-158299, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.071504} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2012.223536] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Deleted the datastore file {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2012.223723] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Deleted contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2012.223897] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2012.224088] env[61649]: INFO nova.compute.manager [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Took 0.60 seconds to destroy the instance on the hypervisor. 
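
The VirtualDisk copy that failed above and the datastore-file delete that just completed both go through the same oslo.vmware task protocol: the client invokes a vSphere *_Task method, logs "Waiting for the task ... to complete", polls until the task reaches a terminal state ("progress is 0%" ... "completed successfully"), and on failure raises a translated fault, which is exactly where the VimFaultException in the traceback above comes from (_poll_task calling exceptions.translate_fault). A minimal sketch of that poll-until-terminal contract follows; it is not oslo.vmware's implementation, and TaskInfo, fetch_task_info and TaskFailed are hypothetical stand-ins for the vSphere Task object, the property read and the translated fault.

import time
from dataclasses import dataclass


@dataclass
class TaskInfo:
    state: str                # "running", "success" or "error"
    progress: int = 0         # percent complete, as echoed in the log
    error: str | None = None  # fault detail when state == "error"


class TaskFailed(Exception):
    """Stand-in for the fault raised via oslo_vmware.exceptions.translate_fault."""


def wait_for_task(fetch_task_info, poll_interval=0.5):
    # oslo.vmware drives this from a looping call (see the loopingcall
    # frames in the traceback above); this sketch simply polls inline.
    while True:
        info = fetch_task_info()
        if info.state == "success":
            return info
        if info.state == "error":
            raise TaskFailed(info.error)
        print(f"progress is {info.progress}%")
        time.sleep(poll_interval)
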
[ 2012.225797] env[61649]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-e5112bda-5a7e-4121-bd13-7744daba2a87 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.227572] env[61649]: DEBUG nova.compute.claims [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Aborting claim: {{(pid=61649) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 2012.227757] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2012.228043] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2012.229856] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg e487fd40fd9044d88c6fc2acde5a8f1c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2012.250976] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2012.262054] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e487fd40fd9044d88c6fc2acde5a8f1c [ 2012.299400] env[61649]: DEBUG oslo_vmware.rw_handles [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d5222410-eaf5-4066-99e3-3d9371112206/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2012.362019] env[61649]: DEBUG oslo_vmware.rw_handles [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Completed reading data from the image iterator. 
{{(pid=61649) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2012.362211] env[61649]: DEBUG oslo_vmware.rw_handles [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d5222410-eaf5-4066-99e3-3d9371112206/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2012.433949] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c76d6609-ebea-426e-b819-c2820cbb1625 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.441242] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-258f6594-d9b4-4c04-9e77-1888f59a565e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.470110] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad98e2cf-a848-47de-bddf-38d70c6d6574 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.476788] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ed992d4-0ba0-479f-8bc9-7f397dd6a046 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.490271] env[61649]: DEBUG nova.compute.provider_tree [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2012.490749] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg d9a0c5e8e4f8432a9bbd5528e071d1e7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2012.498025] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d9a0c5e8e4f8432a9bbd5528e071d1e7 [ 2012.498949] env[61649]: DEBUG nova.scheduler.client.report [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2012.501274] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] 
Expecting reply to msg 17db3d0c1f014150bc4aa79fe171e262 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2012.514186] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 17db3d0c1f014150bc4aa79fe171e262 [ 2012.514860] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.287s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2012.515371] env[61649]: ERROR nova.compute.manager [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2012.515371] env[61649]: Faults: ['InvalidArgument'] [ 2012.515371] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Traceback (most recent call last): [ 2012.515371] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2012.515371] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] self.driver.spawn(context, instance, image_meta, [ 2012.515371] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2012.515371] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2012.515371] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2012.515371] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] self._fetch_image_if_missing(context, vi) [ 2012.515371] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2012.515371] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] image_cache(vi, tmp_image_ds_loc) [ 2012.515371] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2012.515791] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] vm_util.copy_virtual_disk( [ 2012.515791] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2012.515791] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] session._wait_for_task(vmdk_copy_task) [ 2012.515791] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2012.515791] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] return 
self.wait_for_task(task_ref) [ 2012.515791] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2012.515791] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] return evt.wait() [ 2012.515791] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2012.515791] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] result = hub.switch() [ 2012.515791] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2012.515791] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] return self.greenlet.switch() [ 2012.515791] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2012.515791] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] self.f(*self.args, **self.kw) [ 2012.516244] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2012.516244] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] raise exceptions.translate_fault(task_info.error) [ 2012.516244] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2012.516244] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Faults: ['InvalidArgument'] [ 2012.516244] env[61649]: ERROR nova.compute.manager [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] [ 2012.516244] env[61649]: DEBUG nova.compute.utils [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] VimFaultException {{(pid=61649) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2012.517412] env[61649]: DEBUG nova.compute.manager [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Build of instance 4661732c-51dc-4a77-aa32-28049dbd5ad7 was re-scheduled: A specified parameter was not correct: fileType [ 2012.517412] env[61649]: Faults: ['InvalidArgument'] {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2012.517816] env[61649]: DEBUG nova.compute.manager [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Unplugging VIFs for instance {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2012.517989] env[61649]: DEBUG nova.compute.manager [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 
tempest-ServerDiskConfigTestJSON-783193802-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2012.518159] env[61649]: DEBUG nova.compute.manager [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2012.518319] env[61649]: DEBUG nova.network.neutron [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2012.751096] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 145f316377774ffaa9a0402c30c0b6bb in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2012.760928] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 145f316377774ffaa9a0402c30c0b6bb [ 2012.761497] env[61649]: DEBUG nova.network.neutron [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2012.761991] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 442e0daa41c44f82aa985542d03ed4ba in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2012.774558] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 442e0daa41c44f82aa985542d03ed4ba [ 2012.774558] env[61649]: INFO nova.compute.manager [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Took 0.25 seconds to deallocate network for instance.
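
The paired lockutils lines that follow ("Acquiring lock ... by ...", "Lock ... acquired ... :: waited Ns", "Lock ... "released" ... :: held Ns") are emitted by oslo.concurrency's named-lock wrapper each time a critical section is entered and left. A minimal sketch of the two usage forms, with lock names taken from this log; the function bodies are hypothetical stand-ins for what nova actually does under the locks.

from oslo_concurrency import lockutils

INSTANCE_UUID = "4661732c-51dc-4a77-aa32-28049dbd5ad7"  # from the log above


# Decorator form: every call serializes on the named lock. This is why
# do_terminate_instance is seen below waiting 394.049s -- the same lock
# name was still held by _locked_do_build_and_run_instance.
@lockutils.synchronized(INSTANCE_UUID)
def do_terminate_instance():
    pass  # stand-in: nova shuts down and deletes the instance here


# Context-manager form, as used around resource-tracker updates:
def abort_instance_claim():
    with lockutils.lock("compute_resources"):
        pass  # stand-in: claim bookkeeping runs under the lock
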
[ 2012.774558] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 3e90406d19db4f25946b100ff3e13431 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2012.828593] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3e90406d19db4f25946b100ff3e13431 [ 2012.831317] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg d35bb74f9fde47d28fb8081a476729ca in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2012.860138] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d35bb74f9fde47d28fb8081a476729ca [ 2012.878654] env[61649]: INFO nova.scheduler.client.report [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Deleted allocations for instance 4661732c-51dc-4a77-aa32-28049dbd5ad7 [ 2012.884949] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 82e9c3d22d31491f84f9585fcb18450b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2012.894659] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 82e9c3d22d31491f84f9585fcb18450b [ 2012.895158] env[61649]: DEBUG oslo_concurrency.lockutils [None req-fcc95fcb-0b15-49a0-a6ef-f467419a0835 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Lock "4661732c-51dc-4a77-aa32-28049dbd5ad7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 589.828s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2012.895394] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c579601c-ce4f-4631-ac51-e6c58b07739e tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Lock "4661732c-51dc-4a77-aa32-28049dbd5ad7" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 394.049s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2012.895619] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c579601c-ce4f-4631-ac51-e6c58b07739e tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Acquiring lock "4661732c-51dc-4a77-aa32-28049dbd5ad7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2012.895841] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c579601c-ce4f-4631-ac51-e6c58b07739e tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Lock "4661732c-51dc-4a77-aa32-28049dbd5ad7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2012.896035]
env[61649]: DEBUG oslo_concurrency.lockutils [None req-c579601c-ce4f-4631-ac51-e6c58b07739e tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Lock "4661732c-51dc-4a77-aa32-28049dbd5ad7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2012.897884] env[61649]: INFO nova.compute.manager [None req-c579601c-ce4f-4631-ac51-e6c58b07739e tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Terminating instance [ 2012.899649] env[61649]: DEBUG nova.compute.manager [None req-c579601c-ce4f-4631-ac51-e6c58b07739e tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2012.899874] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-c579601c-ce4f-4631-ac51-e6c58b07739e tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2012.900362] env[61649]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-28f38cc8-52e4-4be5-ab13-349350d95a85 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.909348] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7aaebc5-78f7-4268-ba1f-c2e5a380e06c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.936215] env[61649]: WARNING nova.virt.vmwareapi.vmops [None req-c579601c-ce4f-4631-ac51-e6c58b07739e tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4661732c-51dc-4a77-aa32-28049dbd5ad7 could not be found. [ 2012.936427] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-c579601c-ce4f-4631-ac51-e6c58b07739e tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2012.936604] env[61649]: INFO nova.compute.manager [None req-c579601c-ce4f-4631-ac51-e6c58b07739e tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2012.936888] env[61649]: DEBUG oslo.service.loopingcall [None req-c579601c-ce4f-4631-ac51-e6c58b07739e tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
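
The "Waiting for function ... _deallocate_network_with_retries to return." line above is oslo.service's looping-call machinery driving the deallocation retries. One plausible shape of that pattern, as a sketch only: nova's actual helper and retry policy may differ, and deallocate_once is hypothetical.

from oslo_service import loopingcall


def deallocate_once():
    # Hypothetical retry body: attempt the network deallocation; raising
    # LoopingCallDone ends the loop and hands a result back to wait().
    raise loopingcall.LoopingCallDone(retvalue=True)


timer = loopingcall.FixedIntervalLoopingCall(deallocate_once)
# start() schedules the function and returns an event; wait() blocks until
# LoopingCallDone is raised. The "Waiting for function ... to return."
# DEBUG line above is logged while such a wait is in progress.
result = timer.start(interval=30).wait()
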
[ 2012.937114] env[61649]: DEBUG nova.compute.manager [-] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2012.937207] env[61649]: DEBUG nova.network.neutron [-] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2012.953256] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg dd68acd0cb574604b1feaf0f780e9c8c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2012.958854] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dd68acd0cb574604b1feaf0f780e9c8c [ 2012.959187] env[61649]: DEBUG nova.network.neutron [-] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2012.959549] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 2b8c37de402243ba94a87d8a1a1a5a2e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2012.967050] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2b8c37de402243ba94a87d8a1a1a5a2e [ 2012.967463] env[61649]: INFO nova.compute.manager [-] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] Took 0.03 seconds to deallocate network for instance. [ 2012.970823] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c579601c-ce4f-4631-ac51-e6c58b07739e tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 7c0bb25a4cb94156bf8aee40371e0710 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2012.993983] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7c0bb25a4cb94156bf8aee40371e0710 [ 2013.007823] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c579601c-ce4f-4631-ac51-e6c58b07739e tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 5fe51568e2884da9ba96c5ff83389c0e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2013.042176] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5fe51568e2884da9ba96c5ff83389c0e [ 2013.044842] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c579601c-ce4f-4631-ac51-e6c58b07739e tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Lock "4661732c-51dc-4a77-aa32-28049dbd5ad7" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.149s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2013.045127] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c579601c-ce4f-4631-ac51-e6c58b07739e tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg ad303a99eabe4e0ebb7a6a8ec921677d in queue reply_17c3d98394d943e0a538ced2a50ef815
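
The long run of "Running periodic task ComputeManager._..." lines that follows comes from oslo.service's periodic-task runner, which walks every method tagged with the periodic_task decorator and fires each one when its spacing has elapsed. A minimal sketch of how such tasks are declared and driven; the Manager class, its single task and the 60-second spacing are illustrative, not nova's actual configuration.

from oslo_config import cfg
from oslo_service import periodic_task


class Manager(periodic_task.PeriodicTasks):
    @periodic_task.periodic_task(spacing=60)
    def _poll_rebooting_instances(self, context):
        pass  # stand-in: nova checks for stuck reboots here


mgr = Manager(cfg.CONF)
# Each call runs whatever is due and emits the "Running periodic task ..."
# DEBUG lines seen below; a long-running service calls this in a loop.
mgr.run_periodic_tasks(None)
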
[ 2013.046254] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "4661732c-51dc-4a77-aa32-28049dbd5ad7" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 275.025s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2013.046254] env[61649]: INFO nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 4661732c-51dc-4a77-aa32-28049dbd5ad7] During sync_power_state the instance has a pending task (deleting). Skip. [ 2013.046254] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "4661732c-51dc-4a77-aa32-28049dbd5ad7" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2013.054843] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ad303a99eabe4e0ebb7a6a8ec921677d [ 2038.929400] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2038.929801] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61649) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 2039.930299] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2040.929220] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2044.930574] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2045.924601] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2045.929232] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2045.929542] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg a6a251434d6d4051801ef123471041f0 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2045.936121] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a6a251434d6d4051801ef123471041f0 [ 2046.936033] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61649) run_periodic_tasks
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2047.929385] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2047.929812] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg bd27521e363f47d68b4aed4dc2d3d6c0 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2047.939213] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bd27521e363f47d68b4aed4dc2d3d6c0 [ 2047.940241] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2047.940445] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2047.940604] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2047.940756] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61649) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2047.941811] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6c2fb4f-852a-40b5-bf98-047e8e6f4b25 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.950431] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffac55b0-f7f7-4157-9384-2ac3e97e1881 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.964539] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60859599-4a2c-4322-90e8-f2425cf1ad91 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.970526] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c648880-0b1b-4936-a678-f721676694a7 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.001082] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181789MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61649) _report_hypervisor_resource_view 
/opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2048.001221] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2048.001411] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2048.002241] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 321ce9ecdae34a37933e1a10a2669390 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2048.033132] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 321ce9ecdae34a37933e1a10a2669390 [ 2048.036826] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 1a603ed3dab6461bb44ea9e8774a08a0 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2048.045805] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1a603ed3dab6461bb44ea9e8774a08a0 [ 2048.061729] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 545947a4-3f1a-44fe-ac02-ec5e2e5844d5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2048.061881] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2048.062008] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 28a3b287-8717-42d5-989a-4f66642134f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2048.062128] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 4e47e82d-780e-4c23-8071-083beab2a53f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2048.062244] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 8b1cd843-48b0-4e85-93fa-32ddd6e32883 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2048.062356] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance d8503feb-d1df-4e1f-8357-e080e8bdb174 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2048.062469] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance f0e69971-df47-4ef0-85c9-ac686e4a4f9d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2048.062580] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 4d429147-d3fe-4d99-af2a-e28a3829f434 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2048.062690] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 8295f484-2065-4a21-bdec-7d38e98f93e7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2048.062864] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2048.062996] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2048.165012] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f2ebf32-7179-4dfe-83a5-6b3a60a42503 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.172192] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdc7f303-f424-4960-9547-b469fb9c84c8 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.201487] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98d7d0f9-d9e2-4f1e-bced-8d898349e595 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.207692] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3e60bef-c277-4d1b-b595-853bd35ae164 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.220518] env[61649]: DEBUG nova.compute.provider_tree 
[None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2048.220941] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg ae46014bda46443eada19458fa417a01 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2048.228033] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ae46014bda46443eada19458fa417a01 [ 2048.228865] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2048.231035] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 74002f2b805b480089799a9f92fcc1cd in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2048.246850] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 74002f2b805b480089799a9f92fcc1cd [ 2048.246850] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61649) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2048.246850] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.245s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2048.929148] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2048.929148] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Cleaning up deleted instances with incomplete migration {{(pid=61649) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11255}} [ 2048.929392] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg c25ae98dc2b84b1183722e057c74056a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2048.937792] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c25ae98dc2b84b1183722e057c74056a [ 2049.939099] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2049.939398] env[61649]: DEBUG 
nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Starting heal instance info cache {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 2049.939398] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Rebuilding the list of instances to heal {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 2049.940015] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 5edb2811ff0a4a1f8443e7015a51ab30 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2049.956588] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5edb2811ff0a4a1f8443e7015a51ab30 [ 2049.958655] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2049.958807] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2049.958939] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2049.959064] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2049.959189] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2049.959310] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2049.959429] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2049.959546] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2049.959714] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Skipping network cache update for instance because it is Building. 
{{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2049.959825] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Didn't find any instances for network info cache update. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 2050.929286] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2057.930762] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2057.931078] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Cleaning up deleted instances {{(pid=61649) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11217}} [ 2057.931551] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg d7e91be265b2470598db7020999fb5a0 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2057.940532] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d7e91be265b2470598db7020999fb5a0 [ 2057.941109] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] There are 0 instances to clean {{(pid=61649) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11226}} [ 2059.241645] env[61649]: DEBUG oslo_concurrency.lockutils [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquiring lock "da4cbfc3-cf43-4cf6-b391-d7183699e58d" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2059.241928] env[61649]: DEBUG oslo_concurrency.lockutils [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Lock "da4cbfc3-cf43-4cf6-b391-d7183699e58d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2059.242335] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 4980052dfb3e470d999f9e91e9e9379c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2059.251986] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4980052dfb3e470d999f9e91e9e9379c [ 2059.252417] env[61649]: DEBUG nova.compute.manager [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Starting instance... 
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2059.253969] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 3dcd352ea104477997ff84a0b2b0985a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2059.282622] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3dcd352ea104477997ff84a0b2b0985a [ 2059.297256] env[61649]: DEBUG oslo_concurrency.lockutils [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2059.297490] env[61649]: DEBUG oslo_concurrency.lockutils [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2059.298934] env[61649]: INFO nova.compute.claims [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2059.300807] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 4048c1cec10e44b68755bc45c36f1a30 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2059.330082] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4048c1cec10e44b68755bc45c36f1a30 [ 2059.331638] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 3518f7f2f2024911ab8b9f6fa7228539 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2059.338278] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3518f7f2f2024911ab8b9f6fa7228539 [ 2059.464855] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4c2373e-0e95-4a51-9473-bb40070ff3dc {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.473108] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f22cc0d-f828-48b2-8ca9-ac869350ef46 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.502800] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3415d154-ba4d-4d9b-b391-8dd28a6f9443 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.509371] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5386eac1-b29c-492d-873e-12e7ecf9a027 {{(pid=61649) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.522111] env[61649]: DEBUG nova.compute.provider_tree [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2059.522581] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 2308c539cb3e4374ab931152f96ef6bd in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2059.530418] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2308c539cb3e4374ab931152f96ef6bd [ 2059.531402] env[61649]: DEBUG nova.scheduler.client.report [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2059.533558] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 2985a452e2b649a7a83d174b08d3f50f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2059.545781] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2985a452e2b649a7a83d174b08d3f50f [ 2059.546520] env[61649]: DEBUG oslo_concurrency.lockutils [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.249s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2059.546961] env[61649]: DEBUG nova.compute.manager [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Start building networks asynchronously for instance. 
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2059.548615] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 34c75bfa270b4400ad94759b4bf5a487 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2059.578291] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 34c75bfa270b4400ad94759b4bf5a487 [ 2059.579740] env[61649]: DEBUG nova.compute.utils [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Using /dev/sd instead of None {{(pid=61649) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2059.580345] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 2b73f204b8764b729df35cc1ace29a2d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2059.581128] env[61649]: DEBUG nova.compute.manager [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Allocating IP information in the background. {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2059.581283] env[61649]: DEBUG nova.network.neutron [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] allocate_for_instance() {{(pid=61649) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2059.591031] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2b73f204b8764b729df35cc1ace29a2d [ 2059.591516] env[61649]: DEBUG nova.compute.manager [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Start building block device mappings for instance. 
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2059.593061] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 8b8ea525f480445ba4ef7098781d82e1 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2059.620225] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8b8ea525f480445ba4ef7098781d82e1 [ 2059.622880] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 9279a9b387b847d28cfa106cd8b63b3e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2059.625595] env[61649]: DEBUG nova.policy [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fc5f71ebe35b4863a38dd7606ae87937', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '72501ae7a7dd4f85801c096912a5af36', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61649) authorize /opt/stack/nova/nova/policy.py:203}} [ 2059.650878] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9279a9b387b847d28cfa106cd8b63b3e [ 2059.651895] env[61649]: DEBUG nova.compute.manager [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Start spawning the instance on the hypervisor. 
{{(pid=61649) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2059.673873] env[61649]: DEBUG nova.virt.hardware [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-04-02T10:03:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-04-02T10:03:42Z,direct_url=,disk_format='vmdk',id=d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d9c1bd4c77004c3cb8e42232cad1896c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-04-02T10:03:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2059.674136] env[61649]: DEBUG nova.virt.hardware [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Flavor limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2059.674292] env[61649]: DEBUG nova.virt.hardware [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Image limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2059.674472] env[61649]: DEBUG nova.virt.hardware [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Flavor pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2059.674618] env[61649]: DEBUG nova.virt.hardware [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Image pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2059.674766] env[61649]: DEBUG nova.virt.hardware [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2059.674971] env[61649]: DEBUG nova.virt.hardware [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2059.675129] env[61649]: DEBUG nova.virt.hardware [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2059.675295] env[61649]: DEBUG nova.virt.hardware [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 
tempest-ServersTestJSON-1529792186-project-member] Got 1 possible topologies {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2059.675459] env[61649]: DEBUG nova.virt.hardware [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2059.675623] env[61649]: DEBUG nova.virt.hardware [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2059.676494] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8147b0db-4620-49c9-8c2c-c8dceec7b8b8 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.684211] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82b4740c-933c-4ca9-b469-bf35fdac52c7 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.929872] env[61649]: DEBUG nova.network.neutron [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Successfully created port: 3c77072d-39a0-4419-b4d8-aa1d667200ec {{(pid=61649) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2060.385152] env[61649]: DEBUG nova.compute.manager [req-e002d1a6-3b77-4e09-a59c-2e478f0c30cb req-dcf53310-2a0f-431a-bd12-0cef08b068f2 service nova] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Received event network-vif-plugged-3c77072d-39a0-4419-b4d8-aa1d667200ec {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 2060.385399] env[61649]: DEBUG oslo_concurrency.lockutils [req-e002d1a6-3b77-4e09-a59c-2e478f0c30cb req-dcf53310-2a0f-431a-bd12-0cef08b068f2 service nova] Acquiring lock "da4cbfc3-cf43-4cf6-b391-d7183699e58d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2060.385526] env[61649]: DEBUG oslo_concurrency.lockutils [req-e002d1a6-3b77-4e09-a59c-2e478f0c30cb req-dcf53310-2a0f-431a-bd12-0cef08b068f2 service nova] Lock "da4cbfc3-cf43-4cf6-b391-d7183699e58d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2060.385596] env[61649]: DEBUG oslo_concurrency.lockutils [req-e002d1a6-3b77-4e09-a59c-2e478f0c30cb req-dcf53310-2a0f-431a-bd12-0cef08b068f2 service nova] Lock "da4cbfc3-cf43-4cf6-b391-d7183699e58d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2060.385764] env[61649]: DEBUG nova.compute.manager [req-e002d1a6-3b77-4e09-a59c-2e478f0c30cb req-dcf53310-2a0f-431a-bd12-0cef08b068f2 service nova] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] 
No waiting events found dispatching network-vif-plugged-3c77072d-39a0-4419-b4d8-aa1d667200ec {{(pid=61649) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2060.385946] env[61649]: WARNING nova.compute.manager [req-e002d1a6-3b77-4e09-a59c-2e478f0c30cb req-dcf53310-2a0f-431a-bd12-0cef08b068f2 service nova] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Received unexpected event network-vif-plugged-3c77072d-39a0-4419-b4d8-aa1d667200ec for instance with vm_state building and task_state spawning. [ 2060.925822] env[61649]: DEBUG nova.network.neutron [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Successfully updated port: 3c77072d-39a0-4419-b4d8-aa1d667200ec {{(pid=61649) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2060.926318] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 29d85f3ade4449e08aa692043b467b42 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2060.936040] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 29d85f3ade4449e08aa692043b467b42 [ 2060.936763] env[61649]: DEBUG oslo_concurrency.lockutils [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquiring lock "refresh_cache-da4cbfc3-cf43-4cf6-b391-d7183699e58d" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2060.936890] env[61649]: DEBUG oslo_concurrency.lockutils [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquired lock "refresh_cache-da4cbfc3-cf43-4cf6-b391-d7183699e58d" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2060.937031] env[61649]: DEBUG nova.network.neutron [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Building network info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 2060.937409] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 142c87f7ec77453d95efe3a576566b6c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2060.944220] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 142c87f7ec77453d95efe3a576566b6c [ 2060.966901] env[61649]: DEBUG nova.compute.manager [req-973498cf-41c3-45a4-b9f8-8842cb76c254 req-11c8441d-7f00-4afb-b87c-b3161a400c05 service nova] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Received event network-changed-3c77072d-39a0-4419-b4d8-aa1d667200ec {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 2060.967104] env[61649]: DEBUG nova.compute.manager [req-973498cf-41c3-45a4-b9f8-8842cb76c254 req-11c8441d-7f00-4afb-b87c-b3161a400c05 service nova] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Refreshing instance network info cache due to event network-changed-3c77072d-39a0-4419-b4d8-aa1d667200ec. 
{{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 2060.967294] env[61649]: DEBUG oslo_concurrency.lockutils [req-973498cf-41c3-45a4-b9f8-8842cb76c254 req-11c8441d-7f00-4afb-b87c-b3161a400c05 service nova] Acquiring lock "refresh_cache-da4cbfc3-cf43-4cf6-b391-d7183699e58d" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2060.991589] env[61649]: DEBUG nova.network.neutron [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Instance cache missing network info. {{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 2061.122052] env[61649]: DEBUG nova.network.neutron [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Updating instance_info_cache with network_info: [{"id": "3c77072d-39a0-4419-b4d8-aa1d667200ec", "address": "fa:16:3e:52:37:0c", "network": {"id": "8c8e2f95-e820-404a-a1c5-63f49f27fa23", "bridge": "br-int", "label": "tempest-ServersTestJSON-1518117450-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72501ae7a7dd4f85801c096912a5af36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "158692b5-b9fb-49e8-9903-e742ffd6c168", "external-id": "nsx-vlan-transportzone-769", "segmentation_id": 769, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c77072d-39", "ovs_interfaceid": "3c77072d-39a0-4419-b4d8-aa1d667200ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2061.122544] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg d947f8938868482f90186fc2d3341527 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2061.134108] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d947f8938868482f90186fc2d3341527 [ 2061.134630] env[61649]: DEBUG oslo_concurrency.lockutils [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Releasing lock "refresh_cache-da4cbfc3-cf43-4cf6-b391-d7183699e58d" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2061.134888] env[61649]: DEBUG nova.compute.manager [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Instance network_info: |[{"id": "3c77072d-39a0-4419-b4d8-aa1d667200ec", "address": "fa:16:3e:52:37:0c", "network": {"id": "8c8e2f95-e820-404a-a1c5-63f49f27fa23", "bridge": "br-int", 
"label": "tempest-ServersTestJSON-1518117450-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72501ae7a7dd4f85801c096912a5af36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "158692b5-b9fb-49e8-9903-e742ffd6c168", "external-id": "nsx-vlan-transportzone-769", "segmentation_id": 769, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c77072d-39", "ovs_interfaceid": "3c77072d-39a0-4419-b4d8-aa1d667200ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2061.135159] env[61649]: DEBUG oslo_concurrency.lockutils [req-973498cf-41c3-45a4-b9f8-8842cb76c254 req-11c8441d-7f00-4afb-b87c-b3161a400c05 service nova] Acquired lock "refresh_cache-da4cbfc3-cf43-4cf6-b391-d7183699e58d" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2061.135330] env[61649]: DEBUG nova.network.neutron [req-973498cf-41c3-45a4-b9f8-8842cb76c254 req-11c8441d-7f00-4afb-b87c-b3161a400c05 service nova] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Refreshing network info cache for port 3c77072d-39a0-4419-b4d8-aa1d667200ec {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 2061.135703] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-973498cf-41c3-45a4-b9f8-8842cb76c254 req-11c8441d-7f00-4afb-b87c-b3161a400c05 service nova] Expecting reply to msg 106bd27ea9dd44e9a7b88ca82a33148a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2061.136500] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:52:37:0c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '158692b5-b9fb-49e8-9903-e742ffd6c168', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3c77072d-39a0-4419-b4d8-aa1d667200ec', 'vif_model': 'vmxnet3'}] {{(pid=61649) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2061.144102] env[61649]: DEBUG oslo.service.loopingcall [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2061.144653] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Creating VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2061.144900] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6abc15d9-d4ec-47ec-8640-937575f67940 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.159234] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 106bd27ea9dd44e9a7b88ca82a33148a [ 2061.167675] env[61649]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2061.167675] env[61649]: value = "task-158300" [ 2061.167675] env[61649]: _type = "Task" [ 2061.167675] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2061.174894] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158300, 'name': CreateVM_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2061.269312] env[61649]: WARNING oslo_vmware.rw_handles [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2061.269312] env[61649]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2061.269312] env[61649]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2061.269312] env[61649]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2061.269312] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2061.269312] env[61649]: ERROR oslo_vmware.rw_handles response.begin() [ 2061.269312] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2061.269312] env[61649]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2061.269312] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2061.269312] env[61649]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2061.269312] env[61649]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2061.269312] env[61649]: ERROR oslo_vmware.rw_handles [ 2061.269893] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Downloaded image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to vmware_temp/d5222410-eaf5-4066-99e3-3d9371112206/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2061.272377] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 
tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Caching image {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2061.272677] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Copying Virtual Disk [datastore1] vmware_temp/d5222410-eaf5-4066-99e3-3d9371112206/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk to [datastore1] vmware_temp/d5222410-eaf5-4066-99e3-3d9371112206/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk {{(pid=61649) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2061.273392] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c5e02456-742e-4cb0-b744-c789575cd945 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.280903] env[61649]: DEBUG oslo_vmware.api [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Waiting for the task: (returnval){ [ 2061.280903] env[61649]: value = "task-158301" [ 2061.280903] env[61649]: _type = "Task" [ 2061.280903] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2061.288766] env[61649]: DEBUG oslo_vmware.api [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Task: {'id': task-158301, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2061.448515] env[61649]: DEBUG nova.network.neutron [req-973498cf-41c3-45a4-b9f8-8842cb76c254 req-11c8441d-7f00-4afb-b87c-b3161a400c05 service nova] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Updated VIF entry in instance network info cache for port 3c77072d-39a0-4419-b4d8-aa1d667200ec. 
{{(pid=61649) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 2061.448963] env[61649]: DEBUG nova.network.neutron [req-973498cf-41c3-45a4-b9f8-8842cb76c254 req-11c8441d-7f00-4afb-b87c-b3161a400c05 service nova] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Updating instance_info_cache with network_info: [{"id": "3c77072d-39a0-4419-b4d8-aa1d667200ec", "address": "fa:16:3e:52:37:0c", "network": {"id": "8c8e2f95-e820-404a-a1c5-63f49f27fa23", "bridge": "br-int", "label": "tempest-ServersTestJSON-1518117450-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72501ae7a7dd4f85801c096912a5af36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "158692b5-b9fb-49e8-9903-e742ffd6c168", "external-id": "nsx-vlan-transportzone-769", "segmentation_id": 769, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c77072d-39", "ovs_interfaceid": "3c77072d-39a0-4419-b4d8-aa1d667200ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2061.449519] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-973498cf-41c3-45a4-b9f8-8842cb76c254 req-11c8441d-7f00-4afb-b87c-b3161a400c05 service nova] Expecting reply to msg 31a03554a3aa4e76adbaf6b06c903b34 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2061.458162] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 31a03554a3aa4e76adbaf6b06c903b34 [ 2061.458740] env[61649]: DEBUG oslo_concurrency.lockutils [req-973498cf-41c3-45a4-b9f8-8842cb76c254 req-11c8441d-7f00-4afb-b87c-b3161a400c05 service nova] Releasing lock "refresh_cache-da4cbfc3-cf43-4cf6-b391-d7183699e58d" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2061.678193] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158300, 'name': CreateVM_Task, 'duration_secs': 0.295272} completed successfully. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2061.678343] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Created VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2061.678989] env[61649]: DEBUG oslo_concurrency.lockutils [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2061.679142] env[61649]: DEBUG oslo_concurrency.lockutils [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2061.679483] env[61649]: DEBUG oslo_concurrency.lockutils [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2061.679741] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f191fb26-7adb-4f61-b16b-191234cd1775 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.683940] env[61649]: DEBUG oslo_vmware.api [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Waiting for the task: (returnval){ [ 2061.683940] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]521ad69f-40d1-e431-ac41-97246517db38" [ 2061.683940] env[61649]: _type = "Task" [ 2061.683940] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2061.691512] env[61649]: DEBUG oslo_vmware.api [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]521ad69f-40d1-e431-ac41-97246517db38, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2061.789593] env[61649]: DEBUG oslo_vmware.exceptions [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Fault InvalidArgument not matched. 
{{(pid=61649) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2061.789974] env[61649]: DEBUG oslo_concurrency.lockutils [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2061.790590] env[61649]: ERROR nova.compute.manager [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2061.790590] env[61649]: Faults: ['InvalidArgument'] [ 2061.790590] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Traceback (most recent call last): [ 2061.790590] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2061.790590] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] yield resources [ 2061.790590] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2061.790590] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] self.driver.spawn(context, instance, image_meta, [ 2061.790590] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2061.790590] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2061.790590] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2061.790590] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] self._fetch_image_if_missing(context, vi) [ 2061.790590] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2061.790873] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] image_cache(vi, tmp_image_ds_loc) [ 2061.790873] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2061.790873] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] vm_util.copy_virtual_disk( [ 2061.790873] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2061.790873] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] session._wait_for_task(vmdk_copy_task) [ 2061.790873] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2061.790873] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] return self.wait_for_task(task_ref) [ 2061.790873] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2061.790873] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] return evt.wait() [ 2061.790873] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2061.790873] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] result = hub.switch() [ 2061.790873] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2061.790873] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] return self.greenlet.switch() [ 2061.791192] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2061.791192] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] self.f(*self.args, **self.kw) [ 2061.791192] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2061.791192] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] raise exceptions.translate_fault(task_info.error) [ 2061.791192] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2061.791192] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Faults: ['InvalidArgument'] [ 2061.791192] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] [ 2061.791382] env[61649]: INFO nova.compute.manager [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Terminating instance [ 2061.793012] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2061.793302] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2061.793960] env[61649]: DEBUG nova.compute.manager [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a 
tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2061.794198] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2061.794457] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c48100cf-5208-466a-814a-e1eb510d5220 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.796609] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-313fcbee-8d1d-4207-87dd-f75d4f78daec {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.804329] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Unregistering the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2061.805327] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-58c4b1bf-7eda-436c-bde7-b0c95ccc72b3 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.806678] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2061.806939] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61649) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2061.807618] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9619c7d6-1b13-4906-946a-ab7296b0b0a4 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.812681] env[61649]: DEBUG oslo_vmware.api [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Waiting for the task: (returnval){ [ 2061.812681] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]5229c6e4-355e-56b5-f24b-8f0f6aa19f4e" [ 2061.812681] env[61649]: _type = "Task" [ 2061.812681] env[61649]: } to complete. 
{{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2061.825509] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Preparing fetch location {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2061.825801] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Creating directory with path [datastore1] vmware_temp/d6d7be5f-b8d7-4751-a2e8-cbb3a646c370/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2061.826043] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-98d15cb6-2f03-402c-ae62-65d1fcba6522 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.844500] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Created directory with path [datastore1] vmware_temp/d6d7be5f-b8d7-4751-a2e8-cbb3a646c370/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2061.844759] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Fetch image to [datastore1] vmware_temp/d6d7be5f-b8d7-4751-a2e8-cbb3a646c370/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2061.844982] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to [datastore1] vmware_temp/d6d7be5f-b8d7-4751-a2e8-cbb3a646c370/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2061.845750] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f47a17ea-afd6-4c55-91a3-344cbf005c05 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.852943] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc28b91a-8916-4ce7-8abc-b8ece640de51 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.861866] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4555a11-d4d0-4e4b-ae9c-d344a7a56dd0 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.890737] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-87f9a899-c128-4c10-a096-e1888575b600 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.896212] env[61649]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-f0c8d2ac-c2a6-419a-b19e-806cd8a61a8e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.916842] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2062.030685] env[61649]: DEBUG oslo_vmware.rw_handles [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d6d7be5f-b8d7-4751-a2e8-cbb3a646c370/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2062.094983] env[61649]: DEBUG oslo_vmware.rw_handles [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Completed reading data from the image iterator. {{(pid=61649) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2062.094983] env[61649]: DEBUG oslo_vmware.rw_handles [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d6d7be5f-b8d7-4751-a2e8-cbb3a646c370/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61649) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2062.192925] env[61649]: DEBUG oslo_concurrency.lockutils [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2062.193245] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Processing image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2062.193521] env[61649]: DEBUG oslo_concurrency.lockutils [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2062.873224] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Unregistered the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2062.873626] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Deleting contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2062.873626] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Deleting the datastore file [datastore1] 545947a4-3f1a-44fe-ac02-ec5e2e5844d5 {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2062.873951] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-838d97b5-b925-4694-92e0-ee46f6e219ec {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.881444] env[61649]: DEBUG oslo_vmware.api [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Waiting for the task: (returnval){ [ 2062.881444] env[61649]: value = "task-158303" [ 2062.881444] env[61649]: _type = "Task" [ 2062.881444] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2062.888794] env[61649]: DEBUG oslo_vmware.api [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Task: {'id': task-158303, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2063.391772] env[61649]: DEBUG oslo_vmware.api [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Task: {'id': task-158303, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.067074} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2063.392215] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Deleted the datastore file {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2063.392721] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Deleted contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2063.393027] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2063.393312] env[61649]: INFO nova.compute.manager [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Took 1.60 seconds to destroy the instance on the hypervisor. 
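[Annotation] The records above show the standard oslo.vmware task pattern: FileManager.DeleteDatastoreFile_Task is invoked, wait_for_task blocks the caller, and _poll_task samples the task's state on a timer (progress 0%, then "completed successfully" with duration_secs 0.067074). The sketch below is a minimal, self-contained version of that poll-until-terminal loop, not the real oslo.vmware implementation; get_task_info is a hypothetical accessor standing in for reading the task's TaskInfo property.

    import time

    class TaskFailed(Exception):
        """Stand-in for the translated fault _poll_task raises on error."""

    def wait_for_task(get_task_info, poll_interval=0.5, timeout=300.0):
        """Poll a vCenter-style task until it reaches a terminal state.

        get_task_info is a hypothetical callable returning an object with
        .state ('queued' | 'running' | 'success' | 'error') and .error.
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()
            if info.state == 'success':
                return info
            if info.state == 'error':
                # Mirrors _poll_task raising a translated fault, which is
                # exactly how the VimFaultException tracebacks above surface.
                raise TaskFailed(info.error)
            time.sleep(poll_interval)
        raise TimeoutError('task did not complete within %.0fs' % timeout)

On success the loop simply returns the final task info; on error the fault propagates to the caller of wait_for_task, which is why the fileType fault appears in the compute manager's traceback rather than in the poller itself.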
[ 2063.395470] env[61649]: DEBUG nova.compute.claims [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Aborting claim: {{(pid=61649) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 2063.395739] env[61649]: DEBUG oslo_concurrency.lockutils [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2063.396069] env[61649]: DEBUG oslo_concurrency.lockutils [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2063.397933] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Expecting reply to msg 0a129f445d1a460da1648850083f4e12 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2063.428713] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0a129f445d1a460da1648850083f4e12 [ 2063.547987] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2b6332d-5b1d-4a93-96bb-ce4f116025e0 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.554790] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbe131f4-4d4b-4408-b928-529c6f01b3f3 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.586653] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a32e449-47b4-4565-b0fa-6726b01ccad2 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.602579] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-930540a2-dd50-46e1-9cb0-99b56d627c55 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.616031] env[61649]: DEBUG nova.compute.provider_tree [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2063.616031] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Expecting reply to msg 512867aa7be440aca394c0e7117b97cb in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2063.622323] env[61649]: 
INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 512867aa7be440aca394c0e7117b97cb [ 2063.623246] env[61649]: DEBUG nova.scheduler.client.report [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2063.625557] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Expecting reply to msg 920f91d87fc54deba1b276d36159eae8 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2063.637510] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 920f91d87fc54deba1b276d36159eae8 [ 2063.638194] env[61649]: DEBUG oslo_concurrency.lockutils [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.242s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2063.638767] env[61649]: ERROR nova.compute.manager [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2063.638767] env[61649]: Faults: ['InvalidArgument'] [ 2063.638767] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Traceback (most recent call last): [ 2063.638767] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2063.638767] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] self.driver.spawn(context, instance, image_meta, [ 2063.638767] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2063.638767] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2063.638767] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2063.638767] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] self._fetch_image_if_missing(context, vi) [ 2063.638767] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in 
_fetch_image_if_missing [ 2063.638767] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] image_cache(vi, tmp_image_ds_loc) [ 2063.638767] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2063.639036] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] vm_util.copy_virtual_disk( [ 2063.639036] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2063.639036] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] session._wait_for_task(vmdk_copy_task) [ 2063.639036] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2063.639036] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] return self.wait_for_task(task_ref) [ 2063.639036] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2063.639036] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] return evt.wait() [ 2063.639036] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2063.639036] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] result = hub.switch() [ 2063.639036] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2063.639036] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] return self.greenlet.switch() [ 2063.639036] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2063.639036] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] self.f(*self.args, **self.kw) [ 2063.639318] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2063.639318] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] raise exceptions.translate_fault(task_info.error) [ 2063.639318] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2063.639318] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Faults: ['InvalidArgument'] [ 2063.639318] env[61649]: ERROR nova.compute.manager [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] [ 2063.639956] env[61649]: DEBUG nova.compute.utils [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] VimFaultException {{(pid=61649) 
notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2063.641426] env[61649]: DEBUG nova.compute.manager [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Build of instance 545947a4-3f1a-44fe-ac02-ec5e2e5844d5 was re-scheduled: A specified parameter was not correct: fileType [ 2063.641426] env[61649]: Faults: ['InvalidArgument'] {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2063.641882] env[61649]: DEBUG nova.compute.manager [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Unplugging VIFs for instance {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2063.642120] env[61649]: DEBUG nova.compute.manager [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2063.642352] env[61649]: DEBUG nova.compute.manager [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2063.642576] env[61649]: DEBUG nova.network.neutron [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2063.885822] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Expecting reply to msg 48b123a8a25b4f48ad6502add44e7126 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2063.897370] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 48b123a8a25b4f48ad6502add44e7126 [ 2063.897370] env[61649]: DEBUG nova.network.neutron [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2063.897370] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Expecting reply to msg bd3a8feb4f544245ba4d4511acd9df37 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2063.906361] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bd3a8feb4f544245ba4d4511acd9df37 [ 2063.907403] env[61649]: INFO 
nova.compute.manager [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Took 0.26 seconds to deallocate network for instance. [ 2063.908761] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Expecting reply to msg 5fb81edb086e43e4bdf12a7a326697c3 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2063.940747] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5fb81edb086e43e4bdf12a7a326697c3 [ 2063.943391] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Expecting reply to msg 99e63df145fd474aa332793a9288b013 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2063.972385] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 99e63df145fd474aa332793a9288b013 [ 2063.991586] env[61649]: INFO nova.scheduler.client.report [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Deleted allocations for instance 545947a4-3f1a-44fe-ac02-ec5e2e5844d5 [ 2063.997503] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Expecting reply to msg 34df20b20469473f84bd0c494c16f26d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2064.007461] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 34df20b20469473f84bd0c494c16f26d [ 2064.007988] env[61649]: DEBUG oslo_concurrency.lockutils [None req-bf26f72c-6c5e-4b41-abdb-45c46fef8e3a tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Lock "545947a4-3f1a-44fe-ac02-ec5e2e5844d5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 577.241s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2064.008274] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5fd1f9d4-83ac-4871-b754-2375a30ada96 tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Lock "545947a4-3f1a-44fe-ac02-ec5e2e5844d5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 380.917s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2064.008488] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5fd1f9d4-83ac-4871-b754-2375a30ada96 tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Acquiring lock "545947a4-3f1a-44fe-ac02-ec5e2e5844d5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2064.008686] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5fd1f9d4-83ac-4871-b754-2375a30ada96 
tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Lock "545947a4-3f1a-44fe-ac02-ec5e2e5844d5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2064.008844] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5fd1f9d4-83ac-4871-b754-2375a30ada96 tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Lock "545947a4-3f1a-44fe-ac02-ec5e2e5844d5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2064.010924] env[61649]: INFO nova.compute.manager [None req-5fd1f9d4-83ac-4871-b754-2375a30ada96 tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Terminating instance [ 2064.012536] env[61649]: DEBUG nova.compute.manager [None req-5fd1f9d4-83ac-4871-b754-2375a30ada96 tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2064.012729] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-5fd1f9d4-83ac-4871-b754-2375a30ada96 tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2064.013200] env[61649]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d7e825df-7288-4570-a611-432a877da49e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.022142] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e80a332e-ef19-42d8-8810-8cac106c2055 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.050432] env[61649]: WARNING nova.virt.vmwareapi.vmops [None req-5fd1f9d4-83ac-4871-b754-2375a30ada96 tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 545947a4-3f1a-44fe-ac02-ec5e2e5844d5 could not be found. [ 2064.050692] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-5fd1f9d4-83ac-4871-b754-2375a30ada96 tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2064.051004] env[61649]: INFO nova.compute.manager [None req-5fd1f9d4-83ac-4871-b754-2375a30ada96 tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Took 0.04 seconds to destroy the instance on the hypervisor. 
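[Annotation] The WARNING above ("Instance does not exist on backend: nova.exception.InstanceNotFound") followed immediately by "Instance destroyed" illustrates that the second terminate is deliberately idempotent: a backend lookup miss is logged and tolerated, not treated as a failure, because the VM was already unregistered and its datastore files deleted by the earlier destroy. A minimal sketch of that pattern, assuming a hypothetical driver.destroy call and exception class:

    class InstanceNotFound(Exception):
        """Stand-in for nova.exception.InstanceNotFound."""

    def destroy_instance(driver, instance_uuid, log):
        """Destroy an instance, treating 'already gone' as success."""
        try:
            driver.destroy(instance_uuid)
        except InstanceNotFound:
            # The backend VM is missing; log it and carry on, as the
            # WARNING in the records above does.
            log.warning('Instance does not exist on backend: %s',
                        instance_uuid)
        # Either way the instance is gone from the hypervisor's point of
        # view, so the caller can proceed with network/resource cleanup.

Note also the lock choreography in the surrounding records: the per-instance lock ("545947a4-...") serializes do_terminate_instance against the build path, and the short-lived "-events" lock clears pending instance events before teardown.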
[ 2064.051288] env[61649]: DEBUG oslo.service.loopingcall [None req-5fd1f9d4-83ac-4871-b754-2375a30ada96 tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2064.051513] env[61649]: DEBUG nova.compute.manager [-] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2064.051611] env[61649]: DEBUG nova.network.neutron [-] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2064.250147] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg e76b243ecf284da0984bb4f2c061e6fd in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2064.258049] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e76b243ecf284da0984bb4f2c061e6fd [ 2064.258049] env[61649]: DEBUG nova.network.neutron [-] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2064.258049] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg eeced14c38794af6827aad87f2416443 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2064.264927] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eeced14c38794af6827aad87f2416443 [ 2064.265667] env[61649]: INFO nova.compute.manager [-] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] Took 0.21 seconds to deallocate network for instance. 
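[Annotation] The loopingcall record above ("Waiting for function ..._deallocate_network_with_retries to return.") shows network deallocation being wrapped in a retrying helper so a transient Neutron failure does not leak allocations. The real helper runs the function on a timer inside oslo.service's loopingcall machinery; the sketch below is a simplified plain-loop stand-in for that retry pattern, with all names hypothetical.

    import time

    def call_with_retries(func, attempts=3, delay=1.0, retry_on=(Exception,)):
        """Re-invoke func until it succeeds or attempts are exhausted."""
        for attempt in range(1, attempts + 1):
            try:
                return func()
            except retry_on:
                if attempt == attempts:
                    raise  # out of retries; let the caller see the failure
                time.sleep(delay)

    # Hypothetical usage mirroring the log:
    #   call_with_retries(lambda: deallocate_network(ctx, instance))

Here the wrapped call returned quickly ("Took 0.21 seconds to deallocate network for instance."), so no retry was needed.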
[ 2064.269375] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5fd1f9d4-83ac-4871-b754-2375a30ada96 tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Expecting reply to msg 3cd88165a3794aeb82f1b98bcd0282ca in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2064.299092] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3cd88165a3794aeb82f1b98bcd0282ca [ 2064.314104] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5fd1f9d4-83ac-4871-b754-2375a30ada96 tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Expecting reply to msg c54a4f5de7df44c69c51af4598bfce49 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2064.352239] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c54a4f5de7df44c69c51af4598bfce49 [ 2064.355217] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5fd1f9d4-83ac-4871-b754-2375a30ada96 tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Lock "545947a4-3f1a-44fe-ac02-ec5e2e5844d5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.347s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2064.355537] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5fd1f9d4-83ac-4871-b754-2375a30ada96 tempest-ServerMetadataNegativeTestJSON-1809307654 tempest-ServerMetadataNegativeTestJSON-1809307654-project-member] Expecting reply to msg a96afb933b61419aa56358b2e823c41c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2064.356213] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "545947a4-3f1a-44fe-ac02-ec5e2e5844d5" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 326.335s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2064.356403] env[61649]: INFO nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 545947a4-3f1a-44fe-ac02-ec5e2e5844d5] During sync_power_state the instance has a pending task (deleting). Skip. 
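[Annotation] The _sync_power_states records above and below show the periodic task taking the per-instance lock and then short-circuiting because the instance has a pending task ("During sync_power_state the instance has a pending task (deleting). Skip."): the sync must not race an in-flight operation that is about to change the power state anyway. A minimal sketch of that guard, with instance.task_state and instance.power_state as hypothetical attributes standing in for the Nova instance object:

    def sync_power_state(instance, driver_state, log):
        """Reconcile DB power state with the hypervisor, unless busy."""
        if instance.task_state is not None:
            # Another operation (here: deleting) owns the instance; touching
            # power_state now would race it, so skip this cycle.
            log.info('During sync_power_state the instance has a pending '
                     'task (%s). Skip.', instance.task_state)
            return
        if instance.power_state != driver_state:
            # Record what the hypervisor actually reports.
            instance.power_state = driver_state

Because the lock is taken first, the "held 0.000s" release right after the skip is expected: the fast path does no work once the pending task is detected.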
[ 2064.356573] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "545947a4-3f1a-44fe-ac02-ec5e2e5844d5" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2064.366850] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a96afb933b61419aa56358b2e823c41c [ 2090.990152] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg e29ccb85d41f41fbaddfc31a4312e9b6 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2090.999252] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e29ccb85d41f41fbaddfc31a4312e9b6 [ 2098.939542] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2098.939910] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61649) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 2099.930039] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2100.906142] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c1d15540-ceaf-488e-bb2c-736018af882c tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Expecting reply to msg 30ff9d635df147659907e13ea0c5dca4 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2100.914654] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 30ff9d635df147659907e13ea0c5dca4 [ 2100.915131] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c1d15540-ceaf-488e-bb2c-736018af882c tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Acquiring lock "4d429147-d3fe-4d99-af2a-e28a3829f434" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2102.929408] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2105.929215] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2107.924620] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2107.928317] env[61649]: DEBUG 
oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2107.928706] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 8612d4e7092e487083f34885fdf793d1 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2107.938204] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8612d4e7092e487083f34885fdf793d1 [ 2107.939169] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2107.939389] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2107.939558] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2107.939757] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61649) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2107.940867] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ba751aa-c388-4562-ac59-f0ff2b7b03c6 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.949454] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e77e1e13-c2ca-4c0f-bacb-6d829b7535c3 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.963913] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f86a683-e39c-4974-b71d-13446122d4f6 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.969901] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ee10ccd-04f1-4d9c-aa75-5833287daa2e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.998119] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181835MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61649) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2107.998274] env[61649]: DEBUG oslo_concurrency.lockutils [None 
req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2107.998468] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2107.999323] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg bae49f7d3ef24f0db679429f69f5c6fd in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2108.029682] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bae49f7d3ef24f0db679429f69f5c6fd [ 2108.033277] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg e5ed2d96c1c0443dbbb62a41c8bf15da in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2108.043250] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e5ed2d96c1c0443dbbb62a41c8bf15da [ 2108.091887] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2108.092063] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 28a3b287-8717-42d5-989a-4f66642134f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2108.092198] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 4e47e82d-780e-4c23-8071-083beab2a53f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2108.092321] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 8b1cd843-48b0-4e85-93fa-32ddd6e32883 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2108.092438] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance d8503feb-d1df-4e1f-8357-e080e8bdb174 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2108.092555] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance f0e69971-df47-4ef0-85c9-ac686e4a4f9d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2108.092671] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 4d429147-d3fe-4d99-af2a-e28a3829f434 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2108.092788] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 8295f484-2065-4a21-bdec-7d38e98f93e7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2108.092904] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance da4cbfc3-cf43-4cf6-b391-d7183699e58d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2108.093095] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2108.093230] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2108.109445] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Refreshing inventories for resource provider dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 2108.121838] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Updating ProviderTree inventory for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 2108.122023] env[61649]: DEBUG nova.compute.provider_tree [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Updating inventory in ProviderTree for provider 
dad32f24-3843-462d-a3f9-4ef2a60037c4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2108.132375] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Refreshing aggregate associations for resource provider dad32f24-3843-462d-a3f9-4ef2a60037c4, aggregates: None {{(pid=61649) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 2108.148139] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Refreshing trait associations for resource provider dad32f24-3843-462d-a3f9-4ef2a60037c4, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=61649) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 2108.244691] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef593746-2df8-414b-84e0-afbbd5dd8704 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2108.251950] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4cb89f4-424b-4da8-9129-3b9c9bb2ce3c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2108.281233] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-050dc44e-4402-4104-b799-dfcf770aad41 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2108.288107] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf0ee0e3-ec51-4599-b2f8-94e704b98b6d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.041916] env[61649]: DEBUG nova.compute.provider_tree [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2109.042386] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 29c0667c65b749eda4c497f05fcd8c84 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2109.050487] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 29c0667c65b749eda4c497f05fcd8c84 [ 2109.051381] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2109.053602] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg ca3935d64ed64c03ae699cb2df3d345a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2109.082918] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ca3935d64ed64c03ae699cb2df3d345a [ 2109.083749] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61649) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2109.083954] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.085s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2110.085082] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2110.929998] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2110.930184] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Starting heal instance info cache {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 2110.930303] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Rebuilding the list of instances to heal {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 2110.930920] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 2f8148c98ca64c3dac91d4fe714e2451 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2110.949390] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2f8148c98ca64c3dac91d4fe714e2451 [ 2110.951542] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2110.951748] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2110.951894] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Skipping network cache update for instance because it is Building. 
{{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2110.952037] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2110.952167] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2110.952288] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2110.952404] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2110.952519] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2110.952634] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2110.952758] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Didn't find any instances for network info cache update. 
{{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 2111.134603] env[61649]: WARNING oslo_vmware.rw_handles [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2111.134603] env[61649]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2111.134603] env[61649]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2111.134603] env[61649]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2111.134603] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2111.134603] env[61649]: ERROR oslo_vmware.rw_handles response.begin() [ 2111.134603] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2111.134603] env[61649]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2111.134603] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2111.134603] env[61649]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2111.134603] env[61649]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2111.134603] env[61649]: ERROR oslo_vmware.rw_handles [ 2111.135532] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Downloaded image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to vmware_temp/d6d7be5f-b8d7-4751-a2e8-cbb3a646c370/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2111.137181] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Caching image {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2111.137437] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Copying Virtual Disk [datastore1] vmware_temp/d6d7be5f-b8d7-4751-a2e8-cbb3a646c370/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk to [datastore1] vmware_temp/d6d7be5f-b8d7-4751-a2e8-cbb3a646c370/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk {{(pid=61649) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2111.137727] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5ebf8e43-1028-4f77-a42c-ed85a6e28df5 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.145477] env[61649]: DEBUG oslo_vmware.api [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 
tempest-AttachInterfacesTestJSON-1871132450-project-member] Waiting for the task: (returnval){ [ 2111.145477] env[61649]: value = "task-158304" [ 2111.145477] env[61649]: _type = "Task" [ 2111.145477] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2111.153327] env[61649]: DEBUG oslo_vmware.api [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Task: {'id': task-158304, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2111.655081] env[61649]: DEBUG oslo_vmware.exceptions [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Fault InvalidArgument not matched. {{(pid=61649) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2111.655403] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2111.655978] env[61649]: ERROR nova.compute.manager [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2111.655978] env[61649]: Faults: ['InvalidArgument'] [ 2111.655978] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Traceback (most recent call last): [ 2111.655978] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2111.655978] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] yield resources [ 2111.655978] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2111.655978] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] self.driver.spawn(context, instance, image_meta, [ 2111.655978] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2111.655978] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2111.655978] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2111.655978] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] self._fetch_image_if_missing(context, vi) [ 2111.655978] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in 
_fetch_image_if_missing [ 2111.656482] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] image_cache(vi, tmp_image_ds_loc) [ 2111.656482] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2111.656482] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] vm_util.copy_virtual_disk( [ 2111.656482] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2111.656482] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] session._wait_for_task(vmdk_copy_task) [ 2111.656482] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2111.656482] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] return self.wait_for_task(task_ref) [ 2111.656482] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2111.656482] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] return evt.wait() [ 2111.656482] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2111.656482] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] result = hub.switch() [ 2111.656482] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2111.656482] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] return self.greenlet.switch() [ 2111.656978] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2111.656978] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] self.f(*self.args, **self.kw) [ 2111.656978] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2111.656978] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] raise exceptions.translate_fault(task_info.error) [ 2111.656978] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2111.656978] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Faults: ['InvalidArgument'] [ 2111.656978] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] [ 2111.656978] env[61649]: INFO nova.compute.manager [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Terminating instance [ 2111.659073] env[61649]: 
DEBUG nova.compute.manager [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2111.659259] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2111.659543] env[61649]: DEBUG oslo_concurrency.lockutils [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2111.659771] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2111.660511] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a63ab6f-77d6-40f0-a433-7fe9b5e5772b {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.662948] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e0dd6206-85ef-49a2-8eaf-05437e8563f9 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.669514] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Unregistering the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2111.669808] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2b621b0a-97ce-4307-90ad-046250c8c0b1 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.672055] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2111.672236] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61649) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2111.673168] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a567f52c-9e04-4273-a4d2-4d1f4cdcd9ae {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.677913] env[61649]: DEBUG oslo_vmware.api [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Waiting for the task: (returnval){ [ 2111.677913] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]5244389c-7804-87cb-2037-53beb4c59e62" [ 2111.677913] env[61649]: _type = "Task" [ 2111.677913] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2111.684927] env[61649]: DEBUG oslo_vmware.api [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]5244389c-7804-87cb-2037-53beb4c59e62, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2111.741007] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Unregistered the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2111.741232] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Deleting contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2111.741418] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Deleting the datastore file [datastore1] db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2111.741687] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ae822819-542a-4e53-9b21-52dfe79d7a37 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.747721] env[61649]: DEBUG oslo_vmware.api [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Waiting for the task: (returnval){ [ 2111.747721] env[61649]: value = "task-158306" [ 2111.747721] env[61649]: _type = "Task" [ 2111.747721] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2111.754866] env[61649]: DEBUG oslo_vmware.api [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Task: {'id': task-158306, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2111.928618] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2112.187829] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Preparing fetch location {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2112.188259] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Creating directory with path [datastore1] vmware_temp/7bb523a7-7fe3-4d0a-aa9b-b19b065eedc4/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2112.188365] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4ddaf2d9-aa75-4cde-81d6-61fe89cbad4d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.198633] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Created directory with path [datastore1] vmware_temp/7bb523a7-7fe3-4d0a-aa9b-b19b065eedc4/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2112.198809] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Fetch image to [datastore1] vmware_temp/7bb523a7-7fe3-4d0a-aa9b-b19b065eedc4/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2112.198976] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to [datastore1] vmware_temp/7bb523a7-7fe3-4d0a-aa9b-b19b065eedc4/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2112.199668] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9b4b5e8-31dc-4e3c-8d34-9f3de386067b {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.206083] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72404a54-0a44-47d4-a2ae-f74ad747d10d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.215392] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-5b7a563f-7464-4acd-95c4-fc78b228e842 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.244412] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-190a23f2-9bf9-49aa-9264-32915b3522fc {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.251769] env[61649]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-1d40ca3e-0818-4d54-9039-f0c11df8080f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.257748] env[61649]: DEBUG oslo_vmware.api [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Task: {'id': task-158306, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078157} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2112.257975] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Deleted the datastore file {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2112.258148] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Deleted contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2112.258325] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2112.258498] env[61649]: INFO nova.compute.manager [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Took 0.60 seconds to destroy the instance on the hypervisor. 
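The "A specified parameter was not correct: fileType" fault above reaches Nova through oslo.vmware's task poller: the CopyVirtualDisk_Task is polled until vCenter reports success or error, and a stored task error is translated into a Python exception (the VimFaultException in the traceback). A minimal sketch of that poll-and-raise pattern, assuming a hypothetical poll_task_info() callable and VimFault class rather than the real oslo.vmware API:

import time


class VimFault(Exception):
    """Carries the message and fault list of a failed VIM task."""
    def __init__(self, message, faults):
        super().__init__(message)
        self.faults = faults


def wait_for_task(poll_task_info, task_ref, interval=0.5):
    """Poll a task until it succeeds, raising its stored fault on error.

    poll_task_info(task_ref) is assumed to return an object with
    .state ('running' | 'success' | 'error'), .progress and .error.
    """
    while True:
        info = poll_task_info(task_ref)
        if info.state == "success":
            return info
        if info.state == "error":
            # e.g. "A specified parameter was not correct: fileType",
            # faults ['InvalidArgument'], as for task-158304 above
            raise VimFault(info.error.message, info.error.faults)
        time.sleep(interval)  # still running, logged as "progress is N%"

In the records above, task-158304 is polled once at "progress is 0%" and the fault is raised on the next poll, after which the spawn is aborted and the instance destroyed.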
[ 2112.260572] env[61649]: DEBUG nova.compute.claims [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Aborting claim: {{(pid=61649) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 2112.260749] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2112.260950] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2112.262933] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg 3d9e4e2185f6478a84c6f1742af29c5e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2112.270934] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2112.294541] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3d9e4e2185f6478a84c6f1742af29c5e [ 2112.318538] env[61649]: DEBUG oslo_vmware.rw_handles [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7bb523a7-7fe3-4d0a-aa9b-b19b065eedc4/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2112.379877] env[61649]: DEBUG oslo_vmware.rw_handles [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Completed reading data from the image iterator. {{(pid=61649) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2112.380090] env[61649]: DEBUG oslo_vmware.rw_handles [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7bb523a7-7fe3-4d0a-aa9b-b19b065eedc4/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61649) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2112.461752] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e82f565c-d079-409a-b85e-90539adbaf7e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.469196] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d618281d-04d8-4d08-9536-69ac89ca8825 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.498374] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4903136-51dc-415a-9a3f-b60f1f4113f6 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.504858] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba3adb1e-e54a-4475-b127-4ba385fed779 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.519500] env[61649]: DEBUG nova.compute.provider_tree [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2112.519995] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg d52ada273f6d4da883d47f66c654b76f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2112.526826] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d52ada273f6d4da883d47f66c654b76f [ 2112.527658] env[61649]: DEBUG nova.scheduler.client.report [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2112.529783] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg 3d449d159661446d98582ac31ae2c20c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2112.540044] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3d449d159661446d98582ac31ae2c20c [ 2112.540688] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.280s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2112.541191] env[61649]: ERROR nova.compute.manager [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2112.541191] env[61649]: Faults: ['InvalidArgument'] [ 2112.541191] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Traceback (most recent call last): [ 2112.541191] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2112.541191] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] self.driver.spawn(context, instance, image_meta, [ 2112.541191] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2112.541191] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2112.541191] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2112.541191] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] self._fetch_image_if_missing(context, vi) [ 2112.541191] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2112.541191] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] image_cache(vi, tmp_image_ds_loc) [ 2112.541191] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2112.541540] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] vm_util.copy_virtual_disk( [ 2112.541540] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2112.541540] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] session._wait_for_task(vmdk_copy_task) [ 2112.541540] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2112.541540] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] return self.wait_for_task(task_ref) [ 2112.541540] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2112.541540] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] return evt.wait() [ 2112.541540] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2112.541540] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] result = hub.switch() [ 2112.541540] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2112.541540] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] return self.greenlet.switch() [ 2112.541540] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2112.541540] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] self.f(*self.args, **self.kw) [ 2112.541874] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2112.541874] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] raise exceptions.translate_fault(task_info.error) [ 2112.541874] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2112.541874] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Faults: ['InvalidArgument'] [ 2112.541874] env[61649]: ERROR nova.compute.manager [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] [ 2112.541874] env[61649]: DEBUG nova.compute.utils [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] VimFaultException {{(pid=61649) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2112.543219] env[61649]: DEBUG nova.compute.manager [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Build of instance db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f was re-scheduled: A specified parameter was not correct: fileType [ 2112.543219] env[61649]: Faults: ['InvalidArgument'] {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2112.543590] env[61649]: DEBUG nova.compute.manager [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Unplugging VIFs for instance {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2112.543760] env[61649]: DEBUG nova.compute.manager [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2112.543928] env[61649]: DEBUG nova.compute.manager [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2112.544102] env[61649]: DEBUG nova.network.neutron [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2112.773962] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg 42556658514946adbda1545742ad9574 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2112.785457] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 42556658514946adbda1545742ad9574 [ 2112.786042] env[61649]: DEBUG nova.network.neutron [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2112.786552] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg f3e1faf96ab9403e8f4831b7c6dd5d6f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2112.796075] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f3e1faf96ab9403e8f4831b7c6dd5d6f [ 2112.796673] env[61649]: INFO nova.compute.manager [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Took 0.25 seconds to deallocate network for instance. 
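The "compute_resources" acquire/release pairs in these records (abort_instance_claim above, instance_claim further down) are oslo.concurrency named locks, which is also what produces the "waited N.NNNs" / "held N.NNNs" bookkeeping. A minimal sketch using the real lockutils.synchronized decorator (requires the oslo.concurrency package); the MiniResourceTracker class is hypothetical, standing in for nova.compute.resource_tracker.ResourceTracker:

from oslo_concurrency import lockutils


class MiniResourceTracker:
    """Serializes claim bookkeeping under one named in-process lock."""

    @lockutils.synchronized("compute_resources")
    def instance_claim(self, instance_uuid, vcpus, memory_mb):
        # Reserve resources for a build; the time the lock is held is
        # what the log reports as 'held N.NNNs' on release.
        return {"instance": instance_uuid, "vcpus": vcpus,
                "memory_mb": memory_mb}

    @lockutils.synchronized("compute_resources")
    def abort_instance_claim(self, claim):
        # Invoked when spawn fails (as above), so the claimed resources
        # are returned to the pool before the instance is re-scheduled.
        claim.clear()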
[ 2112.798433] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg 2b42d0d2fc9c4349b11f8a111a75e775 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2112.831293] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2b42d0d2fc9c4349b11f8a111a75e775 [ 2112.834116] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg 171dd1195bb24129a0bbb1b22858e157 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2112.863746] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 171dd1195bb24129a0bbb1b22858e157 [ 2112.882879] env[61649]: INFO nova.scheduler.client.report [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Deleted allocations for instance db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f [ 2112.889207] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg 94a86d256ddc4bf78897b0c45eb04fa5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2112.898414] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 94a86d256ddc4bf78897b0c45eb04fa5 [ 2112.898996] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7698a92e-63b9-45dd-8d98-2f9f55a1d5ff tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Lock "db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 606.446s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2112.899357] env[61649]: DEBUG oslo_concurrency.lockutils [None req-116cb751-e7ef-4134-8f6c-545a8708a028 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Lock "db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 410.467s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2112.899711] env[61649]: DEBUG oslo_concurrency.lockutils [None req-116cb751-e7ef-4134-8f6c-545a8708a028 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Acquiring lock "db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2112.900075] env[61649]: DEBUG oslo_concurrency.lockutils [None req-116cb751-e7ef-4134-8f6c-545a8708a028 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Lock "db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
2112.900378] env[61649]: DEBUG oslo_concurrency.lockutils [None req-116cb751-e7ef-4134-8f6c-545a8708a028 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Lock "db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2112.902341] env[61649]: INFO nova.compute.manager [None req-116cb751-e7ef-4134-8f6c-545a8708a028 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Terminating instance [ 2112.904070] env[61649]: DEBUG nova.compute.manager [None req-116cb751-e7ef-4134-8f6c-545a8708a028 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2112.904364] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-116cb751-e7ef-4134-8f6c-545a8708a028 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2112.904909] env[61649]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fe5d2740-fce0-45f9-9152-dcfe812cb5c2 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.914592] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07baf2f2-0378-4fe5-bd4f-aac73584e749 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.941042] env[61649]: WARNING nova.virt.vmwareapi.vmops [None req-116cb751-e7ef-4134-8f6c-545a8708a028 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f could not be found. [ 2112.941377] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-116cb751-e7ef-4134-8f6c-545a8708a028 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2112.941660] env[61649]: INFO nova.compute.manager [None req-116cb751-e7ef-4134-8f6c-545a8708a028 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2112.942013] env[61649]: DEBUG oslo.service.loopingcall [None req-116cb751-e7ef-4134-8f6c-545a8708a028 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2112.942544] env[61649]: DEBUG nova.compute.manager [-] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2112.942746] env[61649]: DEBUG nova.network.neutron [-] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2112.958757] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg c927bdeb74f34df1b257490f6a659eed in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2112.964204] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c927bdeb74f34df1b257490f6a659eed [ 2112.964611] env[61649]: DEBUG nova.network.neutron [-] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2112.965085] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 84b5cc4c313d4ef5aa7ca7df1092ef39 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2112.972607] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 84b5cc4c313d4ef5aa7ca7df1092ef39 [ 2112.973264] env[61649]: INFO nova.compute.manager [-] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] Took 0.03 seconds to deallocate network for instance. [ 2112.976592] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-116cb751-e7ef-4134-8f6c-545a8708a028 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg 24f238fd9f994c7ea165a13967f3b31b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2113.002553] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 24f238fd9f994c7ea165a13967f3b31b [ 2113.017057] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-116cb751-e7ef-4134-8f6c-545a8708a028 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg 8b9b516ba252443facaef7f8d16ec3df in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2113.051246] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8b9b516ba252443facaef7f8d16ec3df [ 2113.053899] env[61649]: DEBUG oslo_concurrency.lockutils [None req-116cb751-e7ef-4134-8f6c-545a8708a028 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Lock "db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.154s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2113.054217] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-116cb751-e7ef-4134-8f6c-545a8708a028 tempest-AttachInterfacesTestJSON-1871132450 tempest-AttachInterfacesTestJSON-1871132450-project-member] Expecting reply to msg e9b07a3f66534f29b1e12e340e07071b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2113.054925] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: 
waited 375.034s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2113.056136] env[61649]: INFO nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f] During sync_power_state the instance has a pending task (deleting). Skip. [ 2113.056333] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "db2ab0d5-fcbe-44ff-bd94-b0cdb75e643f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2113.063809] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e9b07a3f66534f29b1e12e340e07071b [ 2117.924637] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2117.925194] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 2361f7dd64364287923e7733d8d713b4 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2117.941259] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2361f7dd64364287923e7733d8d713b4 [ 2149.630304] env[61649]: DEBUG oslo_concurrency.lockutils [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquiring lock "56652181-0379-4532-9b2a-e6138cbd73ad" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2149.630606] env[61649]: DEBUG oslo_concurrency.lockutils [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "56652181-0379-4532-9b2a-e6138cbd73ad" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2149.631017] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg ee6dd2a68ca84191b8b54e95edaab93c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2149.639005] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ee6dd2a68ca84191b8b54e95edaab93c [ 2149.639411] env[61649]: DEBUG nova.compute.manager [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Starting instance... 
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2149.640993] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 1f7d01230d1c4ce08a1de2d419ba17f6 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2149.678274] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1f7d01230d1c4ce08a1de2d419ba17f6 [ 2149.696335] env[61649]: DEBUG oslo_concurrency.lockutils [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2149.696335] env[61649]: DEBUG oslo_concurrency.lockutils [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2149.698653] env[61649]: INFO nova.compute.claims [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2149.700991] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 5af4195e0383437b8a2e6b7788e28dfd in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2149.731277] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5af4195e0383437b8a2e6b7788e28dfd [ 2149.732752] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 99ef5accc1b44a4e846b4c4136a50277 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2149.741476] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 99ef5accc1b44a4e846b4c4136a50277 [ 2149.859500] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cb6de04-34fc-4d29-adbe-5284aeb3ab8a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2149.866904] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cbbd87a-151c-45bc-8557-38cc2854dfa4 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2149.896947] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-191fdd9f-41fa-4c4a-a2d0-7a3221b00f5d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2149.903869] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-0cbc076c-d1db-4c8d-970f-78368d2f5429 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2149.916121] env[61649]: DEBUG nova.compute.provider_tree [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2149.916591] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg b58a4e7a46fd4ff0a40e197489904dd5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2149.923898] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b58a4e7a46fd4ff0a40e197489904dd5 [ 2149.924742] env[61649]: DEBUG nova.scheduler.client.report [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2149.926868] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 226095599ecd4c99842c9929f4e57d6e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2149.940053] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 226095599ecd4c99842c9929f4e57d6e [ 2149.940806] env[61649]: DEBUG oslo_concurrency.lockutils [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.244s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2149.941159] env[61649]: DEBUG nova.compute.manager [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Start building networks asynchronously for instance. 
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2149.942739] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 37718f2ae3094eaf88b23eae1b26eb7b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2149.970120] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 37718f2ae3094eaf88b23eae1b26eb7b [ 2149.971737] env[61649]: DEBUG nova.compute.utils [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Using /dev/sd instead of None {{(pid=61649) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2149.972363] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg b4dfe2e5c5e1445cb21b3bba105474da in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2149.973337] env[61649]: DEBUG nova.compute.manager [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Allocating IP information in the background. {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2149.973544] env[61649]: DEBUG nova.network.neutron [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] allocate_for_instance() {{(pid=61649) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2149.980925] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b4dfe2e5c5e1445cb21b3bba105474da [ 2149.981390] env[61649]: DEBUG nova.compute.manager [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Start building block device mappings for instance. 
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2149.983094] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 8f47238a69fb42b89b81b2fbe8044b92 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2150.014158] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8f47238a69fb42b89b81b2fbe8044b92 [ 2150.016748] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg aa61f029a180402ebc57fc0df48f9d8e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2150.021693] env[61649]: DEBUG nova.policy [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4eecfef918474dc8ad298d9eb189f56f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3939f446f6f04aa08a0b91101e55572b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61649) authorize /opt/stack/nova/nova/policy.py:203}} [ 2150.044808] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aa61f029a180402ebc57fc0df48f9d8e [ 2150.045912] env[61649]: DEBUG nova.compute.manager [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Start spawning the instance on the hypervisor. 
{{(pid=61649) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2150.066833] env[61649]: DEBUG nova.virt.hardware [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-04-02T10:03:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-04-02T10:03:42Z,direct_url=,disk_format='vmdk',id=d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d9c1bd4c77004c3cb8e42232cad1896c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-04-02T10:03:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2150.067072] env[61649]: DEBUG nova.virt.hardware [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Flavor limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2150.067230] env[61649]: DEBUG nova.virt.hardware [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Image limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2150.067763] env[61649]: DEBUG nova.virt.hardware [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Flavor pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2150.067939] env[61649]: DEBUG nova.virt.hardware [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Image pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2150.068120] env[61649]: DEBUG nova.virt.hardware [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2150.068341] env[61649]: DEBUG nova.virt.hardware [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2150.068506] env[61649]: DEBUG nova.virt.hardware [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2150.068673] env[61649]: DEBUG nova.virt.hardware [None 
req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Got 1 possible topologies {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2150.068837] env[61649]: DEBUG nova.virt.hardware [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2150.069011] env[61649]: DEBUG nova.virt.hardware [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2150.070257] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f599e1e-6707-422b-b62d-418825248631 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2150.078220] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70b65515-6ca8-4e9c-8de2-98aca7a584d3 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2150.320381] env[61649]: DEBUG nova.network.neutron [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Successfully created port: 3a761844-f88d-4cbb-acc0-11e550a8da43 {{(pid=61649) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2150.777038] env[61649]: DEBUG nova.compute.manager [req-a24c5a13-475f-43e9-b734-54ee7c493201 req-48f23840-516b-44f0-b5e5-8ee67be87773 service nova] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Received event network-vif-plugged-3a761844-f88d-4cbb-acc0-11e550a8da43 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 2150.777272] env[61649]: DEBUG oslo_concurrency.lockutils [req-a24c5a13-475f-43e9-b734-54ee7c493201 req-48f23840-516b-44f0-b5e5-8ee67be87773 service nova] Acquiring lock "56652181-0379-4532-9b2a-e6138cbd73ad-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2150.777472] env[61649]: DEBUG oslo_concurrency.lockutils [req-a24c5a13-475f-43e9-b734-54ee7c493201 req-48f23840-516b-44f0-b5e5-8ee67be87773 service nova] Lock "56652181-0379-4532-9b2a-e6138cbd73ad-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2150.777647] env[61649]: DEBUG oslo_concurrency.lockutils [req-a24c5a13-475f-43e9-b734-54ee7c493201 req-48f23840-516b-44f0-b5e5-8ee67be87773 service nova] Lock "56652181-0379-4532-9b2a-e6138cbd73ad-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2150.777807] env[61649]: DEBUG nova.compute.manager
[req-a24c5a13-475f-43e9-b734-54ee7c493201 req-48f23840-516b-44f0-b5e5-8ee67be87773 service nova] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] No waiting events found dispatching network-vif-plugged-3a761844-f88d-4cbb-acc0-11e550a8da43 {{(pid=61649) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2150.777975] env[61649]: WARNING nova.compute.manager [req-a24c5a13-475f-43e9-b734-54ee7c493201 req-48f23840-516b-44f0-b5e5-8ee67be87773 service nova] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Received unexpected event network-vif-plugged-3a761844-f88d-4cbb-acc0-11e550a8da43 for instance with vm_state building and task_state spawning. [ 2150.845373] env[61649]: DEBUG nova.network.neutron [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Successfully updated port: 3a761844-f88d-4cbb-acc0-11e550a8da43 {{(pid=61649) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2150.845854] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 94cc655a8d174852a1184e5ec18252c5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2150.853538] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 94cc655a8d174852a1184e5ec18252c5 [ 2150.854303] env[61649]: DEBUG oslo_concurrency.lockutils [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquiring lock "refresh_cache-56652181-0379-4532-9b2a-e6138cbd73ad" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2150.854499] env[61649]: DEBUG oslo_concurrency.lockutils [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquired lock "refresh_cache-56652181-0379-4532-9b2a-e6138cbd73ad" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2150.854657] env[61649]: DEBUG nova.network.neutron [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Building network info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 2150.855043] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 1c3c922d7bdb462691feefa06cc19c94 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2150.863580] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1c3c922d7bdb462691feefa06cc19c94 [ 2150.919333] env[61649]: DEBUG nova.network.neutron [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Instance cache missing network info. 
{{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 2151.072205] env[61649]: DEBUG nova.network.neutron [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Updating instance_info_cache with network_info: [{"id": "3a761844-f88d-4cbb-acc0-11e550a8da43", "address": "fa:16:3e:62:5c:c4", "network": {"id": "e70549f5-8c32-456f-9488-fdfff63b2dc8", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2020544300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3939f446f6f04aa08a0b91101e55572b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b29df12-5674-476d-a9e5-5e20f704d224", "external-id": "nsx-vlan-transportzone-754", "segmentation_id": 754, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a761844-f8", "ovs_interfaceid": "3a761844-f88d-4cbb-acc0-11e550a8da43", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2151.072786] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 27c041b48c4643dfa29cabe1dd4630ac in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2151.084823] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 27c041b48c4643dfa29cabe1dd4630ac [ 2151.085395] env[61649]: DEBUG oslo_concurrency.lockutils [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Releasing lock "refresh_cache-56652181-0379-4532-9b2a-e6138cbd73ad" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2151.085669] env[61649]: DEBUG nova.compute.manager [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Instance network_info: |[{"id": "3a761844-f88d-4cbb-acc0-11e550a8da43", "address": "fa:16:3e:62:5c:c4", "network": {"id": "e70549f5-8c32-456f-9488-fdfff63b2dc8", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2020544300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3939f446f6f04aa08a0b91101e55572b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"8b29df12-5674-476d-a9e5-5e20f704d224", "external-id": "nsx-vlan-transportzone-754", "segmentation_id": 754, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a761844-f8", "ovs_interfaceid": "3a761844-f88d-4cbb-acc0-11e550a8da43", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2151.086048] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:62:5c:c4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8b29df12-5674-476d-a9e5-5e20f704d224', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3a761844-f88d-4cbb-acc0-11e550a8da43', 'vif_model': 'vmxnet3'}] {{(pid=61649) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2151.093408] env[61649]: DEBUG oslo.service.loopingcall [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2151.093850] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Creating VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2151.094074] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-06be210e-157b-49ed-8cd1-4085c8247126 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2151.114051] env[61649]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2151.114051] env[61649]: value = "task-158307" [ 2151.114051] env[61649]: _type = "Task" [ 2151.114051] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2151.122617] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158307, 'name': CreateVM_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2151.624850] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158307, 'name': CreateVM_Task, 'duration_secs': 0.270938} completed successfully. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2151.625028] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Created VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2151.625688] env[61649]: DEBUG oslo_concurrency.lockutils [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2151.625855] env[61649]: DEBUG oslo_concurrency.lockutils [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2151.626167] env[61649]: DEBUG oslo_concurrency.lockutils [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2151.626413] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd0f48f3-5174-401a-bbb1-97d343743d85 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2151.630730] env[61649]: DEBUG oslo_vmware.api [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Waiting for the task: (returnval){ [ 2151.630730] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52edcf88-cb77-ccd0-91f6-a05f9f555cfe" [ 2151.630730] env[61649]: _type = "Task" [ 2151.630730] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2151.637825] env[61649]: DEBUG oslo_vmware.api [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52edcf88-cb77-ccd0-91f6-a05f9f555cfe, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2152.142046] env[61649]: DEBUG oslo_concurrency.lockutils [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2152.142321] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Processing image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2152.142567] env[61649]: DEBUG oslo_concurrency.lockutils [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2152.804543] env[61649]: DEBUG nova.compute.manager [req-9693194f-6947-4e3d-a206-cdf4e85b0bee req-f1d2b641-0229-4ffb-9507-49c50cf23d2d service nova] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Received event network-changed-3a761844-f88d-4cbb-acc0-11e550a8da43 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 2152.804750] env[61649]: DEBUG nova.compute.manager [req-9693194f-6947-4e3d-a206-cdf4e85b0bee req-f1d2b641-0229-4ffb-9507-49c50cf23d2d service nova] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Refreshing instance network info cache due to event network-changed-3a761844-f88d-4cbb-acc0-11e550a8da43. 
{{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 2152.804932] env[61649]: DEBUG oslo_concurrency.lockutils [req-9693194f-6947-4e3d-a206-cdf4e85b0bee req-f1d2b641-0229-4ffb-9507-49c50cf23d2d service nova] Acquiring lock "refresh_cache-56652181-0379-4532-9b2a-e6138cbd73ad" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2152.805070] env[61649]: DEBUG oslo_concurrency.lockutils [req-9693194f-6947-4e3d-a206-cdf4e85b0bee req-f1d2b641-0229-4ffb-9507-49c50cf23d2d service nova] Acquired lock "refresh_cache-56652181-0379-4532-9b2a-e6138cbd73ad" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2152.805222] env[61649]: DEBUG nova.network.neutron [req-9693194f-6947-4e3d-a206-cdf4e85b0bee req-f1d2b641-0229-4ffb-9507-49c50cf23d2d service nova] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Refreshing network info cache for port 3a761844-f88d-4cbb-acc0-11e550a8da43 {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 2152.805697] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-9693194f-6947-4e3d-a206-cdf4e85b0bee req-f1d2b641-0229-4ffb-9507-49c50cf23d2d service nova] Expecting reply to msg 5a854f3f58f441c4973d9aa2d98f4b36 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2152.812652] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5a854f3f58f441c4973d9aa2d98f4b36 [ 2153.018391] env[61649]: DEBUG nova.network.neutron [req-9693194f-6947-4e3d-a206-cdf4e85b0bee req-f1d2b641-0229-4ffb-9507-49c50cf23d2d service nova] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Updated VIF entry in instance network info cache for port 3a761844-f88d-4cbb-acc0-11e550a8da43. 
{{(pid=61649) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 2153.018734] env[61649]: DEBUG nova.network.neutron [req-9693194f-6947-4e3d-a206-cdf4e85b0bee req-f1d2b641-0229-4ffb-9507-49c50cf23d2d service nova] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Updating instance_info_cache with network_info: [{"id": "3a761844-f88d-4cbb-acc0-11e550a8da43", "address": "fa:16:3e:62:5c:c4", "network": {"id": "e70549f5-8c32-456f-9488-fdfff63b2dc8", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2020544300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3939f446f6f04aa08a0b91101e55572b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b29df12-5674-476d-a9e5-5e20f704d224", "external-id": "nsx-vlan-transportzone-754", "segmentation_id": 754, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a761844-f8", "ovs_interfaceid": "3a761844-f88d-4cbb-acc0-11e550a8da43", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2153.019242] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-9693194f-6947-4e3d-a206-cdf4e85b0bee req-f1d2b641-0229-4ffb-9507-49c50cf23d2d service nova] Expecting reply to msg 322e5a330ccb46a2b897582209aef225 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2153.027649] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 322e5a330ccb46a2b897582209aef225 [ 2153.028241] env[61649]: DEBUG oslo_concurrency.lockutils [req-9693194f-6947-4e3d-a206-cdf4e85b0bee req-f1d2b641-0229-4ffb-9507-49c50cf23d2d service nova] Releasing lock "refresh_cache-56652181-0379-4532-9b2a-e6138cbd73ad" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2159.930190] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2159.930190] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61649) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 2161.298747] env[61649]: WARNING oslo_vmware.rw_handles [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2161.298747] env[61649]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2161.298747] env[61649]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2161.298747] env[61649]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2161.298747] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2161.298747] env[61649]: ERROR oslo_vmware.rw_handles response.begin() [ 2161.298747] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2161.298747] env[61649]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2161.298747] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2161.298747] env[61649]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2161.298747] env[61649]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2161.298747] env[61649]: ERROR oslo_vmware.rw_handles [ 2161.299853] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Downloaded image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to vmware_temp/7bb523a7-7fe3-4d0a-aa9b-b19b065eedc4/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2161.301036] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Caching image {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2161.301275] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Copying Virtual Disk [datastore1] vmware_temp/7bb523a7-7fe3-4d0a-aa9b-b19b065eedc4/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk to [datastore1] vmware_temp/7bb523a7-7fe3-4d0a-aa9b-b19b065eedc4/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk {{(pid=61649) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2161.301579] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e8e6496d-66c8-4bda-9aec-8d2d565ae261 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.310655] env[61649]: DEBUG oslo_vmware.api [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Waiting 
for the task: (returnval){ [ 2161.310655] env[61649]: value = "task-158308" [ 2161.310655] env[61649]: _type = "Task" [ 2161.310655] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2161.318196] env[61649]: DEBUG oslo_vmware.api [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Task: {'id': task-158308, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2161.820819] env[61649]: DEBUG oslo_vmware.exceptions [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Fault InvalidArgument not matched. {{(pid=61649) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2161.821311] env[61649]: DEBUG oslo_concurrency.lockutils [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2161.821987] env[61649]: ERROR nova.compute.manager [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2161.821987] env[61649]: Faults: ['InvalidArgument'] [ 2161.821987] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Traceback (most recent call last): [ 2161.821987] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2161.821987] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] yield resources [ 2161.821987] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2161.821987] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] self.driver.spawn(context, instance, image_meta, [ 2161.821987] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2161.821987] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2161.821987] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2161.821987] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] self._fetch_image_if_missing(context, vi) [ 2161.821987] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2161.822318] env[61649]: ERROR nova.compute.manager 
[instance: 28a3b287-8717-42d5-989a-4f66642134f7] image_cache(vi, tmp_image_ds_loc) [ 2161.822318] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2161.822318] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] vm_util.copy_virtual_disk( [ 2161.822318] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2161.822318] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] session._wait_for_task(vmdk_copy_task) [ 2161.822318] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2161.822318] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] return self.wait_for_task(task_ref) [ 2161.822318] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2161.822318] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] return evt.wait() [ 2161.822318] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2161.822318] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] result = hub.switch() [ 2161.822318] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2161.822318] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] return self.greenlet.switch() [ 2161.822618] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2161.822618] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] self.f(*self.args, **self.kw) [ 2161.822618] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2161.822618] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] raise exceptions.translate_fault(task_info.error) [ 2161.822618] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2161.822618] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Faults: ['InvalidArgument'] [ 2161.822618] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] [ 2161.823184] env[61649]: INFO nova.compute.manager [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Terminating instance [ 2161.824829] env[61649]: DEBUG oslo_concurrency.lockutils [None req-706189fe-cd31-48ad-b224-2068d4ef4748 
tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2161.825038] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2161.825281] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2295df90-4395-4979-a87a-f6738399c4da {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.827644] env[61649]: DEBUG nova.compute.manager [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2161.827845] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2161.828582] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02ad816f-263f-4e9a-9760-8aeb104ddba4 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.835353] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Unregistering the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2161.835567] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0f3860a7-ebd1-4ee1-8923-d4e27797bd90 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.837695] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2161.837878] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61649) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2161.838775] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2bf03d94-bcd9-476a-a778-cf3f0f974614 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.843330] env[61649]: DEBUG oslo_vmware.api [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Waiting for the task: (returnval){ [ 2161.843330] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]525462a4-06d2-798f-359e-5bb8fa6792f8" [ 2161.843330] env[61649]: _type = "Task" [ 2161.843330] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2161.849926] env[61649]: DEBUG oslo_vmware.api [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]525462a4-06d2-798f-359e-5bb8fa6792f8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2161.898862] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Unregistered the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2161.899063] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Deleting contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2161.899249] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Deleting the datastore file [datastore1] 28a3b287-8717-42d5-989a-4f66642134f7 {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2161.899550] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-057046cb-27fb-49e5-9d1b-d8c83c920c1c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.905801] env[61649]: DEBUG oslo_vmware.api [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Waiting for the task: (returnval){ [ 2161.905801] env[61649]: value = "task-158310" [ 2161.905801] env[61649]: _type = "Task" [ 2161.905801] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2161.913205] env[61649]: DEBUG oslo_vmware.api [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Task: {'id': task-158310, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2161.928725] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2162.354500] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Preparing fetch location {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2162.354839] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Creating directory with path [datastore1] vmware_temp/5696e650-7764-43a4-a1ca-21e89caeaa2e/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2162.354986] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8472405e-b0f7-4cbc-9524-86b8e7b20e2b {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.365827] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Created directory with path [datastore1] vmware_temp/5696e650-7764-43a4-a1ca-21e89caeaa2e/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2162.366036] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Fetch image to [datastore1] vmware_temp/5696e650-7764-43a4-a1ca-21e89caeaa2e/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2162.366205] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to [datastore1] vmware_temp/5696e650-7764-43a4-a1ca-21e89caeaa2e/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2162.366878] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4f08081-38f6-478c-a668-cdc5868181a8 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.372989] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dd2a3a5-4449-4544-8dce-9c62b55b17f1 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.381332] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b25a73ba-5b84-478d-8595-8e33b49911e6 {{(pid=61649) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.412873] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68d42274-99ea-4f7d-a893-a803e9da9bbc {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.419209] env[61649]: DEBUG oslo_vmware.api [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Task: {'id': task-158310, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.082979} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2162.420568] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Deleted the datastore file {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2162.420748] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Deleted contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2162.420944] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2162.421122] env[61649]: INFO nova.compute.manager [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Took 0.59 seconds to destroy the instance on the hypervisor. 
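
The DeleteDatastoreFile_Task sequence above follows the same oslo.vmware pattern as the earlier CreateVM_Task and CopyVirtualDisk_Task entries: the driver submits a vCenter task, then polls its TaskInfo until the state reaches success or error, logging "progress is N%." on each poll and raising a translated fault (such as the VimFaultException seen in the tracebacks in this log) on failure. A minimal Python sketch of that polling loop, assuming an illustrative get_task_info callable and TaskFailed exception rather than oslo.vmware's real session plumbing:

    import time
    from types import SimpleNamespace


    class TaskFailed(Exception):
        """Stand-in for oslo_vmware.exceptions.VimFaultException."""


    def wait_for_task(get_task_info, poll_interval=0.5):
        """Poll a vSphere-style task until it finishes.

        get_task_info: callable returning an object with .state
        ('queued'/'running'/'success'/'error'), .progress and .error.
        """
        while True:
            info = get_task_info()
            if info.state in ('queued', 'running'):
                # Corresponds to the "... progress is 0%." DEBUG lines.
                print(f"Task progress is {info.progress or 0}%.")
                time.sleep(poll_interval)
            elif info.state == 'success':
                return info  # logged as "completed successfully"
            else:
                raise TaskFailed(info.error)


    # Usage with a fake task that completes on its third poll:
    def fake_task_info(_seq=iter([('running', 0), ('running', 50),
                                  ('success', 100)])):
        state, progress = next(_seq)
        return SimpleNamespace(state=state, progress=progress, error=None)

    print(wait_for_task(fake_task_info, poll_interval=0).state)  # success

In oslo.vmware itself this is VMwareAPISession.wait_for_task(task_ref), whose poller raises exceptions.translate_fault(task_info.error) on error, which is exactly the path visible in the InvalidArgument traceback earlier in this log.
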
[ 2162.422809] env[61649]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-13e12dd9-44e8-42c5-8cc3-52117d10af94 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.424583] env[61649]: DEBUG nova.compute.claims [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Aborting claim: {{(pid=61649) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 2162.424756] env[61649]: DEBUG oslo_concurrency.lockutils [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2162.424969] env[61649]: DEBUG oslo_concurrency.lockutils [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2162.426917] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg be920d284a064649916a41f4c4367294 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2162.449872] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2162.457862] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg be920d284a064649916a41f4c4367294 [ 2162.554223] env[61649]: DEBUG oslo_vmware.rw_handles [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5696e650-7764-43a4-a1ca-21e89caeaa2e/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2162.615235] env[61649]: DEBUG oslo_vmware.rw_handles [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Completed reading data from the image iterator. 
{{(pid=61649) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2162.615429] env[61649]: DEBUG oslo_vmware.rw_handles [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5696e650-7764-43a4-a1ca-21e89caeaa2e/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2162.622317] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dad8abae-49ea-47ba-aca2-30a1206e8482 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.630530] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-091a70cf-6e75-4747-8a7e-307ac34b723d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.660047] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb28366e-0dd3-4721-ace8-386d8637973d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.667265] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15d06ee9-78bc-4ad0-a9aa-e1dabed45dce {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.680126] env[61649]: DEBUG nova.compute.provider_tree [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2162.680658] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg a66c59fecd6e4eb388dfc08190fd92e3 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2162.691767] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a66c59fecd6e4eb388dfc08190fd92e3 [ 2162.691767] env[61649]: DEBUG nova.scheduler.client.report [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2162.691767] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg 
ee35b80c23064ca49efe467e83ca2a6b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2162.701208] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ee35b80c23064ca49efe467e83ca2a6b [ 2162.701871] env[61649]: DEBUG oslo_concurrency.lockutils [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.277s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2162.702380] env[61649]: ERROR nova.compute.manager [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2162.702380] env[61649]: Faults: ['InvalidArgument'] [ 2162.702380] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Traceback (most recent call last): [ 2162.702380] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2162.702380] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] self.driver.spawn(context, instance, image_meta, [ 2162.702380] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2162.702380] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2162.702380] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2162.702380] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] self._fetch_image_if_missing(context, vi) [ 2162.702380] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2162.702380] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] image_cache(vi, tmp_image_ds_loc) [ 2162.702380] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2162.702663] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] vm_util.copy_virtual_disk( [ 2162.702663] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2162.702663] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] session._wait_for_task(vmdk_copy_task) [ 2162.702663] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2162.702663] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] return self.wait_for_task(task_ref) [ 2162.702663] 
env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2162.702663] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] return evt.wait() [ 2162.702663] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2162.702663] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] result = hub.switch() [ 2162.702663] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2162.702663] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] return self.greenlet.switch() [ 2162.702663] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2162.702663] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] self.f(*self.args, **self.kw) [ 2162.702926] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2162.702926] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] raise exceptions.translate_fault(task_info.error) [ 2162.702926] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2162.702926] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Faults: ['InvalidArgument'] [ 2162.702926] env[61649]: ERROR nova.compute.manager [instance: 28a3b287-8717-42d5-989a-4f66642134f7] [ 2162.703043] env[61649]: DEBUG nova.compute.utils [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] VimFaultException {{(pid=61649) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2162.704463] env[61649]: DEBUG nova.compute.manager [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Build of instance 28a3b287-8717-42d5-989a-4f66642134f7 was re-scheduled: A specified parameter was not correct: fileType [ 2162.704463] env[61649]: Faults: ['InvalidArgument'] {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2162.704844] env[61649]: DEBUG nova.compute.manager [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Unplugging VIFs for instance {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2162.705021] env[61649]: DEBUG nova.compute.manager [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Virt 
driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2162.705189] env[61649]: DEBUG nova.compute.manager [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2162.705347] env[61649]: DEBUG nova.network.neutron [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2162.990223] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg 6393b4ae24d4418390bc2ac18d638fcd in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2162.998047] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6393b4ae24d4418390bc2ac18d638fcd [ 2162.998584] env[61649]: DEBUG nova.network.neutron [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2162.999053] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg ed575e804f74480c82615ff6d52c5d47 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2163.007771] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ed575e804f74480c82615ff6d52c5d47 [ 2163.008386] env[61649]: INFO nova.compute.manager [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Took 0.30 seconds to deallocate network for instance.
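The "Virt driver does not provide unplug_vifs method" message above reflects a capability probe: the compute manager calls the optional driver method and treats NotImplementedError as "capability absent", then falls through to network deallocation. A plausible shape of that probe, sketched with hypothetical class names (this is an illustration of the pattern, not Nova's actual classes):

    class ComputeDriver:
        """Base driver: optional capabilities raise NotImplementedError."""
        def unplug_vifs(self, instance, network_info):
            raise NotImplementedError()

    class VMwareLikeDriver(ComputeDriver):
        pass  # does not override unplug_vifs, like the driver in this log

    def cleanup_allocated_networks(driver, instance, network_info):
        try:
            driver.unplug_vifs(instance, network_info)
        except NotImplementedError:
            # Mirrors the log line: without the method we cannot determine
            # whether VIFs should be unplugged, so the step is skipped and
            # deallocation proceeds anyway.
            print('Virt driver does not provide unplug_vifs method; skipping')

    cleanup_allocated_networks(VMwareLikeDriver(), 'instance-uuid', [])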
[ 2163.010178] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg aba6e93403cf432db78f6e26e4a2d5e9 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2163.042528] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aba6e93403cf432db78f6e26e4a2d5e9 [ 2163.045022] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg 40c4380a9259441697413823a9d014dc in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2163.077250] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 40c4380a9259441697413823a9d014dc [ 2163.101842] env[61649]: INFO nova.scheduler.client.report [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Deleted allocations for instance 28a3b287-8717-42d5-989a-4f66642134f7 [ 2163.112100] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg 84e983d5d98b420a9e03eea9dfbbc433 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2163.117216] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 84e983d5d98b420a9e03eea9dfbbc433 [ 2163.117754] env[61649]: DEBUG oslo_concurrency.lockutils [None req-80d09936-ef38-4b37-8e63-431cd97e87a3 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Lock "28a3b287-8717-42d5-989a-4f66642134f7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 583.579s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2163.117982] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "28a3b287-8717-42d5-989a-4f66642134f7" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 425.097s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2163.118165] env[61649]: INFO nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] During sync_power_state the instance has a pending task (spawning). Skip. 
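The lockutils entries around this point report two separate durations per lock: how long the caller waited to acquire it (here 425.097s and 387.561s for lock "28a3b287-...") and how long it was then held (583.579s by the build path). A stdlib-only sketch of that instrumentation around threading.Lock, with a hypothetical timed_lock helper; the real oslo.concurrency decorator works differently internally but reports the same two numbers:

    import threading
    import time

    _locks = {}

    def timed_lock(name, func):
        """Run func() under a named lock, reporting waited/held durations."""
        lock = _locks.setdefault(name, threading.Lock())
        t0 = time.monotonic()
        with lock:
            waited = time.monotonic() - t0     # time spent blocked on acquire
            t1 = time.monotonic()
            try:
                return func()
            finally:
                held = time.monotonic() - t1   # time the lock was held
                print(f'Lock "{name}": waited {waited:.3f}s, held {held:.3f}s')

    timed_lock('compute_resources', lambda: time.sleep(0.05))

Long waited times like those above usually mean another request (here, the 583-second build-and-run attempt) held the per-instance lock while the terminate and power-sync requests queued behind it.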
[ 2163.118333] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "28a3b287-8717-42d5-989a-4f66642134f7" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2163.118554] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8067b296-5bf9-4771-b484-6611cb3938e5 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Lock "28a3b287-8717-42d5-989a-4f66642134f7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 387.561s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2163.119788] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8067b296-5bf9-4771-b484-6611cb3938e5 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Acquiring lock "28a3b287-8717-42d5-989a-4f66642134f7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2163.119788] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8067b296-5bf9-4771-b484-6611cb3938e5 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Lock "28a3b287-8717-42d5-989a-4f66642134f7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2163.119788] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8067b296-5bf9-4771-b484-6611cb3938e5 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Lock "28a3b287-8717-42d5-989a-4f66642134f7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2163.121070] env[61649]: INFO nova.compute.manager [None req-8067b296-5bf9-4771-b484-6611cb3938e5 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Terminating instance [ 2163.122985] env[61649]: DEBUG nova.compute.manager [None req-8067b296-5bf9-4771-b484-6611cb3938e5 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Start destroying the instance on the hypervisor. 
{{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2163.123164] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-8067b296-5bf9-4771-b484-6611cb3938e5 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2163.123788] env[61649]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9574946d-e19d-4584-9f4e-d5ae3871489f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2163.136801] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d309f1e-d248-43d0-a1fa-163028a8e304 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2163.163612] env[61649]: WARNING nova.virt.vmwareapi.vmops [None req-8067b296-5bf9-4771-b484-6611cb3938e5 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 28a3b287-8717-42d5-989a-4f66642134f7 could not be found. [ 2163.163825] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-8067b296-5bf9-4771-b484-6611cb3938e5 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2163.164008] env[61649]: INFO nova.compute.manager [None req-8067b296-5bf9-4771-b484-6611cb3938e5 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2163.164260] env[61649]: DEBUG oslo.service.loopingcall [None req-8067b296-5bf9-4771-b484-6611cb3938e5 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2163.164470] env[61649]: DEBUG nova.compute.manager [-] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2163.164563] env[61649]: DEBUG nova.network.neutron [-] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2163.183055] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 5be9125b0b6847599fdbe67749a22df7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2163.189174] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5be9125b0b6847599fdbe67749a22df7 [ 2163.189803] env[61649]: DEBUG nova.network.neutron [-] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2163.190183] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 51a4345a6c994deeba00c99ca194334e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2163.197278] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 51a4345a6c994deeba00c99ca194334e [ 2163.197735] env[61649]: INFO nova.compute.manager [-] [instance: 28a3b287-8717-42d5-989a-4f66642134f7] Took 0.03 seconds to deallocate network for instance. [ 2163.201462] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8067b296-5bf9-4771-b484-6611cb3938e5 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg e4b06e066ca84c2dad36ad2664afc082 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2163.229341] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e4b06e066ca84c2dad36ad2664afc082 [ 2163.242837] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8067b296-5bf9-4771-b484-6611cb3938e5 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg b3ab0f1e8f0744929e0620b5f1bd9e67 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2163.275742] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b3ab0f1e8f0744929e0620b5f1bd9e67 [ 2163.278238] env[61649]: DEBUG oslo_concurrency.lockutils [None req-8067b296-5bf9-4771-b484-6611cb3938e5 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Lock "28a3b287-8717-42d5-989a-4f66642134f7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.160s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2163.278555] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-8067b296-5bf9-4771-b484-6611cb3938e5 tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg 0bdf2c9ab18047c898a9f7a3b28f4cfc in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2163.287615] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0bdf2c9ab18047c898a9f7a3b28f4cfc [ 2163.928465] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task 
ComputeManager._poll_unconfirmed_resizes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2166.929765] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2168.924301] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2169.804548] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-3362cbe3-449d-48dc-bf26-bebd0d418f3c tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg 18b14e8768e0459689c3b199f82cd0ce in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2169.812980] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 18b14e8768e0459689c3b199f82cd0ce [ 2169.813353] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3362cbe3-449d-48dc-bf26-bebd0d418f3c tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Acquiring lock "8295f484-2065-4a21-bdec-7d38e98f93e7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2169.928639] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2169.928931] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2169.929204] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg b5488001d0c0433fb0a27ea8b0a2360a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2169.937389] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b5488001d0c0433fb0a27ea8b0a2360a [ 2169.938335] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2169.938530] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2169.938697] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s 
{{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2169.938851] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61649) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2169.939889] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d519d713-4346-4aea-b2a5-253216a166b9 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.948486] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29776e29-33ad-4ad4-bb09-88a8933b554c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.961740] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9486e0ab-1e21-4e85-8084-1fa85b860331 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.967488] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d27edea5-2a21-4105-b0bb-aaa9292e5704 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.994761] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181843MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61649) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2169.994900] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2169.995085] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2169.995852] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 308bdd2c53b4442cb654a53e62a07428 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2170.023191] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 308bdd2c53b4442cb654a53e62a07428 [ 2170.026560] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 23d62b6c81424e1699f88d6f369cb6c1 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2170.034349] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 23d62b6c81424e1699f88d6f369cb6c1 [ 2170.050617] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 4e47e82d-780e-4c23-8071-083beab2a53f actively managed on this compute host and has allocations in placement: 
{'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2170.050709] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 8b1cd843-48b0-4e85-93fa-32ddd6e32883 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2170.050786] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance d8503feb-d1df-4e1f-8357-e080e8bdb174 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2170.050920] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance f0e69971-df47-4ef0-85c9-ac686e4a4f9d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2170.051040] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 4d429147-d3fe-4d99-af2a-e28a3829f434 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2170.051155] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 8295f484-2065-4a21-bdec-7d38e98f93e7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2170.051270] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance da4cbfc3-cf43-4cf6-b391-d7183699e58d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2170.051380] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 56652181-0379-4532-9b2a-e6138cbd73ad actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2170.051553] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2170.051686] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1536MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2170.144443] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebae9e33-8931-4cc7-b38b-4f5e08d0aee6 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.151655] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b53c9a99-8313-4c86-aaf0-f933a46ac386 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.180354] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68b81aef-7b7d-442b-a5bc-600473e8e5da {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.186950] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8b48afe-6dea-4a3c-a6a9-a87dacec2f80 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.199390] env[61649]: DEBUG nova.compute.provider_tree [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2170.199835] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 29f774bcadaa4175a08d937a1e626105 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2170.206784] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 29f774bcadaa4175a08d937a1e626105 [ 2170.207637] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2170.209858] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg b78b11af64174958a86e5c634fbcd137 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2170.222438] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b78b11af64174958a86e5c634fbcd137 [ 
2170.223098] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61649) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2170.223280] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.228s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2172.223885] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2172.224293] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Starting heal instance info cache {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 2172.224293] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Rebuilding the list of instances to heal {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 2172.224774] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 1309daa28868429e9ddeab7651467ae0 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2172.240329] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1309daa28868429e9ddeab7651467ae0 [ 2172.242271] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2172.242424] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2172.242615] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2172.242778] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2172.242932] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Skipping network cache update for instance because it is Building. 
{{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2172.243067] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2172.243191] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2172.243314] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2172.243434] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Didn't find any instances for network info cache update. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 2172.243906] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2206.734560] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Acquiring lock "09dcd3bd-1baa-4276-b8c5-64de3de036f2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2206.734869] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Lock "09dcd3bd-1baa-4276-b8c5-64de3de036f2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2206.735238] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 818c6241efff41a4bd76d5af501a0ded in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2206.744063] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 818c6241efff41a4bd76d5af501a0ded [ 2206.744470] env[61649]: DEBUG nova.compute.manager [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Starting instance... 
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2206.746157] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg bebba41975c14ebf9de01b743261c778 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2206.775807] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bebba41975c14ebf9de01b743261c778 [ 2206.790712] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2206.790968] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2206.792358] env[61649]: INFO nova.compute.claims [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2206.793845] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 53090805c4ab40f3b0129c847fb5dec5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2206.822920] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 53090805c4ab40f3b0129c847fb5dec5 [ 2206.824417] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 63ae31ddc8e1488386faeb28039d0e43 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2206.831397] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 63ae31ddc8e1488386faeb28039d0e43 [ 2206.937322] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7ac99b4-6392-4b6b-8160-eef8828e3758 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.944453] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8566d59c-c4c9-403d-92b9-a115b702bef7 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.972672] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa84c84b-944b-40bc-b3a4-96b375b4a996 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.979318] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-6594ce96-800a-40ef-9ae1-b5c2eb5a1570 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.992790] env[61649]: DEBUG nova.compute.provider_tree [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2206.993261] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 7ded013f2b1a42b2b864111035d322f1 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2207.000885] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7ded013f2b1a42b2b864111035d322f1 [ 2207.001719] env[61649]: DEBUG nova.scheduler.client.report [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2207.003840] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 929a8fafcf714a5384f1098816235e2d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2207.013806] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 929a8fafcf714a5384f1098816235e2d [ 2207.014452] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.223s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2207.014907] env[61649]: DEBUG nova.compute.manager [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Start building networks asynchronously for instance. 
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2207.016471] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 0e75a7c0a0204d8f939120c55b60dd5b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2207.044143] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0e75a7c0a0204d8f939120c55b60dd5b [ 2207.045585] env[61649]: DEBUG nova.compute.utils [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Using /dev/sd instead of None {{(pid=61649) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2207.046226] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg a49324d7a02d472d975e4fc7dee21386 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2207.047033] env[61649]: DEBUG nova.compute.manager [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Allocating IP information in the background. {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2207.047193] env[61649]: DEBUG nova.network.neutron [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] allocate_for_instance() {{(pid=61649) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2207.057571] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a49324d7a02d472d975e4fc7dee21386 [ 2207.058088] env[61649]: DEBUG nova.compute.manager [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Start building block device mappings for instance. 
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2207.059644] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg bc31aea64b0043b6b8626d9587e9c6e1 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2207.086391] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bc31aea64b0043b6b8626d9587e9c6e1 [ 2207.088925] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 0ac0b90cd0414251a9ad1b8f78d138fe in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2207.091105] env[61649]: DEBUG nova.policy [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b8c6e9a3ba6a48669b1772886e22e023', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4a732894bf424b5e9e3e972af47a7314', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61649) authorize /opt/stack/nova/nova/policy.py:203}} [ 2207.115959] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0ac0b90cd0414251a9ad1b8f78d138fe [ 2207.116960] env[61649]: DEBUG nova.compute.manager [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Start spawning the instance on the hypervisor. 
{{(pid=61649) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2207.137255] env[61649]: DEBUG nova.virt.hardware [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-04-02T10:03:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-04-02T10:03:42Z,direct_url=,disk_format='vmdk',id=d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d9c1bd4c77004c3cb8e42232cad1896c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-04-02T10:03:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2207.137501] env[61649]: DEBUG nova.virt.hardware [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Flavor limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2207.137683] env[61649]: DEBUG nova.virt.hardware [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Image limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2207.137883] env[61649]: DEBUG nova.virt.hardware [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Flavor pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2207.138030] env[61649]: DEBUG nova.virt.hardware [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Image pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2207.138177] env[61649]: DEBUG nova.virt.hardware [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2207.138529] env[61649]: DEBUG nova.virt.hardware [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2207.138759] env[61649]: DEBUG nova.virt.hardware [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2207.138978] env[61649]: DEBUG 
nova.virt.hardware [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Got 1 possible topologies {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2207.139188] env[61649]: DEBUG nova.virt.hardware [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2207.139420] env[61649]: DEBUG nova.virt.hardware [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2207.140598] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8b19229-a3dd-47b7-b213-8a30af96efc5 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2207.148122] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5708116-9453-4d63-a71b-bd5dd3a4e0eb {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2207.374933] env[61649]: DEBUG nova.network.neutron [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Successfully created port: 7b49a5b0-3ee4-433a-aea9-e543b3beb952 {{(pid=61649) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2207.942043] env[61649]: DEBUG nova.compute.manager [req-ae7ff97b-dec1-48b0-b37b-8dd6dc00406f req-ca6fb460-5ea0-4f86-a6e6-d5ca8936be8d service nova] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Received event network-vif-plugged-7b49a5b0-3ee4-433a-aea9-e543b3beb952 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 2207.942307] env[61649]: DEBUG oslo_concurrency.lockutils [req-ae7ff97b-dec1-48b0-b37b-8dd6dc00406f req-ca6fb460-5ea0-4f86-a6e6-d5ca8936be8d service nova] Acquiring lock "09dcd3bd-1baa-4276-b8c5-64de3de036f2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2207.942462] env[61649]: DEBUG oslo_concurrency.lockutils [req-ae7ff97b-dec1-48b0-b37b-8dd6dc00406f req-ca6fb460-5ea0-4f86-a6e6-d5ca8936be8d service nova] Lock "09dcd3bd-1baa-4276-b8c5-64de3de036f2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2207.942627] env[61649]: DEBUG oslo_concurrency.lockutils [req-ae7ff97b-dec1-48b0-b37b-8dd6dc00406f req-ca6fb460-5ea0-4f86-a6e6-d5ca8936be8d service nova] Lock "09dcd3bd-1baa-4276-b8c5-64de3de036f2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2207.942818] env[61649]: DEBUG 
nova.compute.manager [req-ae7ff97b-dec1-48b0-b37b-8dd6dc00406f req-ca6fb460-5ea0-4f86-a6e6-d5ca8936be8d service nova] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] No waiting events found dispatching network-vif-plugged-7b49a5b0-3ee4-433a-aea9-e543b3beb952 {{(pid=61649) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2207.942976] env[61649]: WARNING nova.compute.manager [req-ae7ff97b-dec1-48b0-b37b-8dd6dc00406f req-ca6fb460-5ea0-4f86-a6e6-d5ca8936be8d service nova] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Received unexpected event network-vif-plugged-7b49a5b0-3ee4-433a-aea9-e543b3beb952 for instance with vm_state building and task_state spawning. [ 2208.010824] env[61649]: DEBUG nova.network.neutron [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Successfully updated port: 7b49a5b0-3ee4-433a-aea9-e543b3beb952 {{(pid=61649) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2208.010824] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg b549882fe5754ffbb10a6e09df8ff5a6 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2208.018850] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b549882fe5754ffbb10a6e09df8ff5a6 [ 2208.018850] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Acquiring lock "refresh_cache-09dcd3bd-1baa-4276-b8c5-64de3de036f2" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2208.018850] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Acquired lock "refresh_cache-09dcd3bd-1baa-4276-b8c5-64de3de036f2" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2208.018850] env[61649]: DEBUG nova.network.neutron [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Building network info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 2208.018850] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 7123fe2f1e3e46a98787ce7f673eed90 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2208.026243] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7123fe2f1e3e46a98787ce7f673eed90 [ 2208.056970] env[61649]: DEBUG nova.network.neutron [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Instance cache missing network info. 
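The "No waiting events found dispatching network-vif-plugged-..." / "Received unexpected event" pair above is Nova's external-event handshake: the spawning thread can register a waiter for a network-vif-plugged-<port> event, and Neutron's callback pops it; here the event arrived before anyone was waiting, hence the WARNING. A minimal stdlib sketch of that pattern (class and function names are hypothetical, not Nova's real API):

    import threading

    class InstanceEventsSketch:
        """Hypothetical stand-in for Nova's per-instance event registry."""

        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = {}  # (instance_uuid, event_name) -> threading.Event

        def prepare(self, instance_uuid, event_name):
            # A spawning thread registers interest before plugging the VIF.
            ev = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event_name)] = ev
            return ev

        def pop_instance_event(self, instance_uuid, event_name):
            # Mirrors the "<uuid>-events" lock acquire/release logged above.
            with self._lock:
                return self._waiters.pop((instance_uuid, event_name), None)

    def external_instance_event(events, instance_uuid, event_name):
        waiter = events.pop_instance_event(instance_uuid, event_name)
        if waiter is None:
            # Nothing registered yet -> the WARNING "Received unexpected event".
            print("WARNING: received unexpected event %s for instance %s"
                  % (event_name, instance_uuid))
        else:
            waiter.set()  # wake the thread waiting on this event

    events = InstanceEventsSketch()
    external_instance_event(events, "09dcd3bd-1baa-4276-b8c5-64de3de036f2",
                            "network-vif-plugged-7b49a5b0-3ee4-433a-aea9-e543b3beb952")

Because the instance is still in vm_state building / task_state spawning, receiving the event early is harmless; the port is simply reported as plugged again once the waiter exists.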
{{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 2208.189857] env[61649]: DEBUG nova.network.neutron [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Updating instance_info_cache with network_info: [{"id": "7b49a5b0-3ee4-433a-aea9-e543b3beb952", "address": "fa:16:3e:91:0f:23", "network": {"id": "7ccb9efc-b204-4b68-b0ee-59dd352de539", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-398085553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a732894bf424b5e9e3e972af47a7314", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db068f71-08cc-42d4-8ab6-17134c1585e5", "external-id": "nsx-vlan-transportzone-721", "segmentation_id": 721, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b49a5b0-3e", "ovs_interfaceid": "7b49a5b0-3ee4-433a-aea9-e543b3beb952", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2208.191247] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg b7adb3c8ca414e638a9f9cedb0a12376 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2208.201548] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b7adb3c8ca414e638a9f9cedb0a12376 [ 2208.202219] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Releasing lock "refresh_cache-09dcd3bd-1baa-4276-b8c5-64de3de036f2" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2208.202599] env[61649]: DEBUG nova.compute.manager [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Instance network_info: |[{"id": "7b49a5b0-3ee4-433a-aea9-e543b3beb952", "address": "fa:16:3e:91:0f:23", "network": {"id": "7ccb9efc-b204-4b68-b0ee-59dd352de539", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-398085553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a732894bf424b5e9e3e972af47a7314", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "db068f71-08cc-42d4-8ab6-17134c1585e5", "external-id": "nsx-vlan-transportzone-721", "segmentation_id": 721, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b49a5b0-3e", "ovs_interfaceid": "7b49a5b0-3ee4-433a-aea9-e543b3beb952", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2208.203397] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:91:0f:23', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'db068f71-08cc-42d4-8ab6-17134c1585e5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7b49a5b0-3ee4-433a-aea9-e543b3beb952', 'vif_model': 'vmxnet3'}] {{(pid=61649) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2208.211057] env[61649]: DEBUG oslo.service.loopingcall [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2208.211582] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Creating VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2208.211913] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-275f5b5f-db9a-45dd-8720-f4d73016960a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.231850] env[61649]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2208.231850] env[61649]: value = "task-158311" [ 2208.231850] env[61649]: _type = "Task" [ 2208.231850] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2208.239237] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158311, 'name': CreateVM_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2208.742346] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158311, 'name': CreateVM_Task, 'duration_secs': 0.305664} completed successfully. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2208.742516] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Created VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2208.743169] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2208.743335] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2208.743670] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2208.743909] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ae30566-c3a6-4b98-88cb-890f65ede492 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.748141] env[61649]: DEBUG oslo_vmware.api [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Waiting for the task: (returnval){ [ 2208.748141] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52887fa7-8663-1547-a5ef-4c0c8b180f5f" [ 2208.748141] env[61649]: _type = "Task" [ 2208.748141] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2208.755755] env[61649]: DEBUG oslo_vmware.api [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52887fa7-8663-1547-a5ef-4c0c8b180f5f, 'name': SearchDatastore_Task} progress is 0%. 
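The Acquiring/Acquired/Releasing lock entries around the image-cache path follow oslo.concurrency's named-lock pattern: callers serialize on a string key so only one of them fetches and caches a given image at a time. A rough stdlib approximation (the real lockutils also supports the file-based "external semaphore" logged above, which this sketch omits):

    import threading
    from contextlib import contextmanager

    _named_locks = {}
    _registry_lock = threading.Lock()

    @contextmanager
    def named_lock(name):
        # One process-wide Lock per string key, created on first use.
        with _registry_lock:
            lock = _named_locks.setdefault(name, threading.Lock())
        print('Acquiring lock "%s"' % name)
        with lock:
            print('Lock "%s" acquired' % name)
            yield
        print('Lock "%s" released' % name)

    with named_lock("[datastore1] devstack-image-cache_base/"
                    "d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11"):
        pass  # search the datastore / fetch the image exactly once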
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2209.259831] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2209.260203] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Processing image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2209.260314] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2209.978412] env[61649]: DEBUG nova.compute.manager [req-979415d8-7a68-4bd0-a58f-750a9894429e req-fa70532a-f42d-40f5-a8da-522dbb7e1fb6 service nova] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Received event network-changed-7b49a5b0-3ee4-433a-aea9-e543b3beb952 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 2209.978584] env[61649]: DEBUG nova.compute.manager [req-979415d8-7a68-4bd0-a58f-750a9894429e req-fa70532a-f42d-40f5-a8da-522dbb7e1fb6 service nova] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Refreshing instance network info cache due to event network-changed-7b49a5b0-3ee4-433a-aea9-e543b3beb952. 
{{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 2209.979738] env[61649]: DEBUG oslo_concurrency.lockutils [req-979415d8-7a68-4bd0-a58f-750a9894429e req-fa70532a-f42d-40f5-a8da-522dbb7e1fb6 service nova] Acquiring lock "refresh_cache-09dcd3bd-1baa-4276-b8c5-64de3de036f2" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2209.979738] env[61649]: DEBUG oslo_concurrency.lockutils [req-979415d8-7a68-4bd0-a58f-750a9894429e req-fa70532a-f42d-40f5-a8da-522dbb7e1fb6 service nova] Acquired lock "refresh_cache-09dcd3bd-1baa-4276-b8c5-64de3de036f2" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2209.979738] env[61649]: DEBUG nova.network.neutron [req-979415d8-7a68-4bd0-a58f-750a9894429e req-fa70532a-f42d-40f5-a8da-522dbb7e1fb6 service nova] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Refreshing network info cache for port 7b49a5b0-3ee4-433a-aea9-e543b3beb952 {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 2209.979738] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-979415d8-7a68-4bd0-a58f-750a9894429e req-fa70532a-f42d-40f5-a8da-522dbb7e1fb6 service nova] Expecting reply to msg 9844800dac13479fbdf4c202b692d66f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2209.986604] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9844800dac13479fbdf4c202b692d66f [ 2210.195951] env[61649]: DEBUG nova.network.neutron [req-979415d8-7a68-4bd0-a58f-750a9894429e req-fa70532a-f42d-40f5-a8da-522dbb7e1fb6 service nova] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Updated VIF entry in instance network info cache for port 7b49a5b0-3ee4-433a-aea9-e543b3beb952. 
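The network_info blob logged above is the serialized VIF list Nova caches per instance (the same structure is re-logged just below when the cache entry is refreshed). Extracting the useful fields is plain dict traversal; a trimmed-down example using only keys that actually appear in the entry:

    network_info = [{
        "id": "7b49a5b0-3ee4-433a-aea9-e543b3beb952",
        "address": "fa:16:3e:91:0f:23",
        "devname": "tap7b49a5b0-3e",
        "network": {"subnets": [{"cidr": "192.168.128.0/28",
                                 "gateway": {"address": "192.168.128.1"},
                                 "ips": [{"address": "192.168.128.6"}]}]},
    }]

    for vif in network_info:
        ips = [ip["address"]
               for subnet in vif["network"]["subnets"]
               for ip in subnet["ips"]]
        print(vif["id"], vif["address"], vif["devname"], ips)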
{{(pid=61649) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 2210.195951] env[61649]: DEBUG nova.network.neutron [req-979415d8-7a68-4bd0-a58f-750a9894429e req-fa70532a-f42d-40f5-a8da-522dbb7e1fb6 service nova] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Updating instance_info_cache with network_info: [{"id": "7b49a5b0-3ee4-433a-aea9-e543b3beb952", "address": "fa:16:3e:91:0f:23", "network": {"id": "7ccb9efc-b204-4b68-b0ee-59dd352de539", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-398085553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a732894bf424b5e9e3e972af47a7314", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db068f71-08cc-42d4-8ab6-17134c1585e5", "external-id": "nsx-vlan-transportzone-721", "segmentation_id": 721, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b49a5b0-3e", "ovs_interfaceid": "7b49a5b0-3ee4-433a-aea9-e543b3beb952", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2210.196174] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-979415d8-7a68-4bd0-a58f-750a9894429e req-fa70532a-f42d-40f5-a8da-522dbb7e1fb6 service nova] Expecting reply to msg 4e3cb13127914f5bb5b83a36afd6365f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2210.204271] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4e3cb13127914f5bb5b83a36afd6365f [ 2210.204979] env[61649]: DEBUG oslo_concurrency.lockutils [req-979415d8-7a68-4bd0-a58f-750a9894429e req-fa70532a-f42d-40f5-a8da-522dbb7e1fb6 service nova] Releasing lock "refresh_cache-09dcd3bd-1baa-4276-b8c5-64de3de036f2" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2210.991542] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 10b0416c362b4502afe171ed93925cc3 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2211.000286] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 10b0416c362b4502afe171ed93925cc3 [ 2211.166254] env[61649]: WARNING oslo_vmware.rw_handles [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2211.166254] env[61649]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2211.166254] env[61649]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2211.166254] env[61649]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2211.166254] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2211.166254] env[61649]: ERROR oslo_vmware.rw_handles response.begin() [ 2211.166254] env[61649]: ERROR oslo_vmware.rw_handles 
File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2211.166254] env[61649]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2211.166254] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2211.166254] env[61649]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2211.166254] env[61649]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2211.166254] env[61649]: ERROR oslo_vmware.rw_handles [ 2211.166732] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Downloaded image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to vmware_temp/5696e650-7764-43a4-a1ca-21e89caeaa2e/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2211.168511] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Caching image {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2211.168761] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Copying Virtual Disk [datastore1] vmware_temp/5696e650-7764-43a4-a1ca-21e89caeaa2e/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk to [datastore1] vmware_temp/5696e650-7764-43a4-a1ca-21e89caeaa2e/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk {{(pid=61649) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2211.169017] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-14c40dbb-a26e-472d-bb0c-3998f3869f2d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.177792] env[61649]: DEBUG oslo_vmware.api [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Waiting for the task: (returnval){ [ 2211.177792] env[61649]: value = "task-158312" [ 2211.177792] env[61649]: _type = "Task" [ 2211.177792] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2211.185252] env[61649]: DEBUG oslo_vmware.api [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Task: {'id': task-158312, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2211.688423] env[61649]: DEBUG oslo_vmware.exceptions [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Fault InvalidArgument not matched. 
{{(pid=61649) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2211.688695] env[61649]: DEBUG oslo_concurrency.lockutils [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2211.689258] env[61649]: ERROR nova.compute.manager [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2211.689258] env[61649]: Faults: ['InvalidArgument'] [ 2211.689258] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Traceback (most recent call last): [ 2211.689258] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2211.689258] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] yield resources [ 2211.689258] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2211.689258] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] self.driver.spawn(context, instance, image_meta, [ 2211.689258] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2211.689258] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2211.689258] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2211.689258] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] self._fetch_image_if_missing(context, vi) [ 2211.689258] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2211.689258] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] image_cache(vi, tmp_image_ds_loc) [ 2211.689668] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2211.689668] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] vm_util.copy_virtual_disk( [ 2211.689668] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2211.689668] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] session._wait_for_task(vmdk_copy_task) [ 2211.689668] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 2211.689668] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] return self.wait_for_task(task_ref) [ 2211.689668] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2211.689668] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] return evt.wait() [ 2211.689668] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2211.689668] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] result = hub.switch() [ 2211.689668] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2211.689668] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] return self.greenlet.switch() [ 2211.689668] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2211.690190] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] self.f(*self.args, **self.kw) [ 2211.690190] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2211.690190] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] raise exceptions.translate_fault(task_info.error) [ 2211.690190] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2211.690190] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Faults: ['InvalidArgument'] [ 2211.690190] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] [ 2211.690190] env[61649]: INFO nova.compute.manager [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Terminating instance [ 2211.691092] env[61649]: DEBUG oslo_concurrency.lockutils [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2211.691308] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2211.691548] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-03c7742e-dc5b-473a-88d9-157133c7108e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
2211.693601] env[61649]: DEBUG nova.compute.manager [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2211.693795] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2211.694497] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c14a112-ddfb-446a-9843-2ec90f45a02b {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.701044] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Unregistering the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2211.701250] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0ce96fe2-9e42-4fa5-a889-870743255d5e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.703246] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2211.703414] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61649) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2211.704655] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21e1fa4f-c851-447d-9b3f-c66ccb7759a6 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.709015] env[61649]: DEBUG oslo_vmware.api [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Waiting for the task: (returnval){ [ 2211.709015] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52737aac-ec39-7710-2008-650208dc9226" [ 2211.709015] env[61649]: _type = "Task" [ 2211.709015] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2211.716205] env[61649]: DEBUG oslo_vmware.api [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52737aac-ec39-7710-2008-650208dc9226, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2212.218901] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Preparing fetch location {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2212.219277] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Creating directory with path [datastore1] vmware_temp/fc40c0bf-2419-4963-b946-afa41f2092f6/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2212.219510] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b6787ba0-c872-4dbc-9f93-d32e1dd8e7ba {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2212.238509] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Created directory with path [datastore1] vmware_temp/fc40c0bf-2419-4963-b946-afa41f2092f6/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2212.238700] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Fetch image to [datastore1] vmware_temp/fc40c0bf-2419-4963-b946-afa41f2092f6/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2212.238873] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to [datastore1] vmware_temp/fc40c0bf-2419-4963-b946-afa41f2092f6/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2212.239572] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88803dff-891a-4311-af4e-89f09832485d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2212.245871] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-846084d3-1938-4488-b544-4909a11eb19d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2212.254313] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae75cdf4-eb76-4361-85c6-ad2091bfcfa2 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2212.284749] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae5e74d4-06f8-4d12-998e-c6732cab7e02 {{(pid=61649) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2212.289818] env[61649]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ad526d23-9c4c-4c26-8fb2-95e51c027b04 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2212.310018] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2212.369656] env[61649]: DEBUG oslo_vmware.rw_handles [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/fc40c0bf-2419-4963-b946-afa41f2092f6/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2212.430872] env[61649]: DEBUG oslo_vmware.rw_handles [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Completed reading data from the image iterator. {{(pid=61649) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2212.431053] env[61649]: DEBUG oslo_vmware.rw_handles [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/fc40c0bf-2419-4963-b946-afa41f2092f6/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
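The rw_handles entries above stream image bytes straight into the datastore over HTTPS: open a connection with the announced Content-Length, send the image iterator chunk by chunk, then close the handle, which reads the server's response (the step where the earlier RemoteDisconnected surfaced). A rough stdlib sketch; the real handler also attaches the vCenter session ticket obtained via the AcquireGenericServiceTicket call above, plus certificate handling, both omitted here:

    import http.client
    import urllib.parse

    def upload_to_datastore_sketch(url, chunks, size):
        parsed = urllib.parse.urlparse(url)
        conn = http.client.HTTPSConnection(parsed.netloc)
        conn.putrequest("PUT", "%s?%s" % (parsed.path, parsed.query))
        conn.putheader("Content-Length", str(size))
        conn.endheaders()
        for chunk in chunks:       # "Completed reading data from the image iterator."
            conn.send(chunk)
        resp = conn.getresponse()  # closing the handle reads this response; the
                                   # earlier RemoteDisconnected was raised here
        conn.close()
        return resp.status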
{{(pid=61649) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2213.086561] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Unregistered the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2213.086780] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Deleting contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2213.086955] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Deleting the datastore file [datastore1] 4e47e82d-780e-4c23-8071-083beab2a53f {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2213.087219] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-087b751f-15e2-4dc0-994e-7eea6ef58ff6 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2213.093424] env[61649]: DEBUG oslo_vmware.api [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Waiting for the task: (returnval){ [ 2213.093424] env[61649]: value = "task-158314" [ 2213.093424] env[61649]: _type = "Task" [ 2213.093424] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2213.100392] env[61649]: DEBUG oslo_vmware.api [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Task: {'id': task-158314, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2213.604015] env[61649]: DEBUG oslo_vmware.api [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Task: {'id': task-158314, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.156574} completed successfully. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2213.604425] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Deleted the datastore file {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2213.604425] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Deleted contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2213.604561] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2213.604739] env[61649]: INFO nova.compute.manager [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Took 1.91 seconds to destroy the instance on the hypervisor. [ 2213.606796] env[61649]: DEBUG nova.compute.claims [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Aborting claim: {{(pid=61649) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 2213.606971] env[61649]: DEBUG oslo_concurrency.lockutils [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2213.607185] env[61649]: DEBUG oslo_concurrency.lockutils [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2213.608985] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg 871af256d6cc4417a9939dad91b3afb6 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2213.640700] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 871af256d6cc4417a9939dad91b3afb6 [ 2213.750980] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f89a9fa8-e416-43f7-b041-956f3bf3d92f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2213.757967] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-768fcb4a-f5e0-4c91-8c8d-9a9f321f9722 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2213.786971] env[61649]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f5a054c-b3d2-4844-a9f4-e5ba6e8994cb {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2213.793526] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96289db8-71a7-4e79-bd67-25895d77be21 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2213.806565] env[61649]: DEBUG nova.compute.provider_tree [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2213.807031] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg ea18a5dd77c34c1fbba8c1db4a9fd977 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2213.814310] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ea18a5dd77c34c1fbba8c1db4a9fd977 [ 2213.815163] env[61649]: DEBUG nova.scheduler.client.report [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2213.819275] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg 0b637b3ef9954e9182fa3888d0afe762 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2213.829474] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0b637b3ef9954e9182fa3888d0afe762 [ 2213.830120] env[61649]: DEBUG oslo_concurrency.lockutils [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.223s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2213.830629] env[61649]: ERROR nova.compute.manager [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2213.830629] env[61649]: Faults: ['InvalidArgument'] [ 2213.830629] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Traceback (most recent call last): [ 2213.830629] env[61649]: ERROR nova.compute.manager [instance: 
4e47e82d-780e-4c23-8071-083beab2a53f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2213.830629] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] self.driver.spawn(context, instance, image_meta, [ 2213.830629] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2213.830629] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2213.830629] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2213.830629] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] self._fetch_image_if_missing(context, vi) [ 2213.830629] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2213.830629] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] image_cache(vi, tmp_image_ds_loc) [ 2213.830629] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2213.830963] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] vm_util.copy_virtual_disk( [ 2213.830963] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2213.830963] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] session._wait_for_task(vmdk_copy_task) [ 2213.830963] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2213.830963] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] return self.wait_for_task(task_ref) [ 2213.830963] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2213.830963] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] return evt.wait() [ 2213.830963] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2213.830963] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] result = hub.switch() [ 2213.830963] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2213.830963] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] return self.greenlet.switch() [ 2213.830963] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2213.830963] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] 
self.f(*self.args, **self.kw) [ 2213.831250] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2213.831250] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] raise exceptions.translate_fault(task_info.error) [ 2213.831250] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2213.831250] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Faults: ['InvalidArgument'] [ 2213.831250] env[61649]: ERROR nova.compute.manager [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] [ 2213.831363] env[61649]: DEBUG nova.compute.utils [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] VimFaultException {{(pid=61649) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2213.832848] env[61649]: DEBUG nova.compute.manager [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Build of instance 4e47e82d-780e-4c23-8071-083beab2a53f was re-scheduled: A specified parameter was not correct: fileType [ 2213.832848] env[61649]: Faults: ['InvalidArgument'] {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2213.833226] env[61649]: DEBUG nova.compute.manager [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Unplugging VIFs for instance {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2213.833410] env[61649]: DEBUG nova.compute.manager [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2213.833568] env[61649]: DEBUG nova.compute.manager [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2213.833734] env[61649]: DEBUG nova.network.neutron [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2214.144700] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg 59ddeb6d824a4db8a9b039ba9981fdec in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2214.157635] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 59ddeb6d824a4db8a9b039ba9981fdec [ 2214.158292] env[61649]: DEBUG nova.network.neutron [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2214.158777] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg 3cdcf983e00d4098b36da59bb4879d53 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2214.176540] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3cdcf983e00d4098b36da59bb4879d53 [ 2214.176540] env[61649]: INFO nova.compute.manager [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Took 0.34 seconds to deallocate network for instance. 
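The traceback above is the first of several identical failures in this log: every spawn that has to populate the image cache dies in _cache_sparse_image when its CopyVirtualDisk_Task completes with an InvalidArgument fault on fileType, oslo.vmware's task poller turns the failed task into a VimFaultException, and Nova then reschedules the build and deallocates networking. The sketch below illustrates the poll-and-translate step named at api.py line 448 of the trace; it is a simplified illustration, not Nova's or oslo.vmware's actual code, and task_info stands in for the vSphere TaskInfo object being polled.

    # Minimal sketch, assuming a vSphere TaskInfo-like object: how a
    # failed task's error becomes the VimFaultException seen above.
    from oslo_vmware import exceptions

    def check_task(task_info):
        # TaskInfo.state is one of 'queued', 'running', 'success', 'error'.
        if task_info.state == 'error':
            # translate_fault() maps the VIM fault onto an oslo.vmware
            # exception; for this log that is VimFaultException carrying
            # fault_list ['InvalidArgument'] and the message
            # "A specified parameter was not correct: fileType".
            raise exceptions.translate_fault(task_info.error)
        return task_info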
[ 2214.177066] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg 5cc4d067b5dc4fdc902bcf7e2c118d96 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2214.214402] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5cc4d067b5dc4fdc902bcf7e2c118d96 [ 2214.217025] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg b22e50a9f315497d89a35c9c54310ede in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2214.253402] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b22e50a9f315497d89a35c9c54310ede [ 2214.273108] env[61649]: INFO nova.scheduler.client.report [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Deleted allocations for instance 4e47e82d-780e-4c23-8071-083beab2a53f [ 2214.279061] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg 5ac65f07088648679171f49179cc4177 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2214.288966] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ac65f07088648679171f49179cc4177 [ 2214.289473] env[61649]: DEBUG oslo_concurrency.lockutils [None req-706189fe-cd31-48ad-b224-2068d4ef4748 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Lock "4e47e82d-780e-4c23-8071-083beab2a53f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 585.637s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2214.289760] env[61649]: DEBUG oslo_concurrency.lockutils [None req-a7ff2140-6bd3-4490-b467-6874b1541880 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Lock "4e47e82d-780e-4c23-8071-083beab2a53f" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 389.684s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2214.290000] env[61649]: DEBUG oslo_concurrency.lockutils [None req-a7ff2140-6bd3-4490-b467-6874b1541880 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Acquiring lock "4e47e82d-780e-4c23-8071-083beab2a53f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2214.290214] env[61649]: DEBUG oslo_concurrency.lockutils [None req-a7ff2140-6bd3-4490-b467-6874b1541880 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Lock "4e47e82d-780e-4c23-8071-083beab2a53f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2214.290384] env[61649]: DEBUG oslo_concurrency.lockutils [None req-a7ff2140-6bd3-4490-b467-6874b1541880 tempest-ImagesTestJSON-1732047265
tempest-ImagesTestJSON-1732047265-project-member] Lock "4e47e82d-780e-4c23-8071-083beab2a53f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2214.292673] env[61649]: INFO nova.compute.manager [None req-a7ff2140-6bd3-4490-b467-6874b1541880 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Terminating instance [ 2214.294331] env[61649]: DEBUG nova.compute.manager [None req-a7ff2140-6bd3-4490-b467-6874b1541880 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2214.294513] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-a7ff2140-6bd3-4490-b467-6874b1541880 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2214.294969] env[61649]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8081c12a-6ffe-46c0-a0cd-5b2b139a767d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2214.304534] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2549a65f-291c-407c-a259-3f4c69f3658d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2214.332535] env[61649]: WARNING nova.virt.vmwareapi.vmops [None req-a7ff2140-6bd3-4490-b467-6874b1541880 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4e47e82d-780e-4c23-8071-083beab2a53f could not be found. [ 2214.332736] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-a7ff2140-6bd3-4490-b467-6874b1541880 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2214.332911] env[61649]: INFO nova.compute.manager [None req-a7ff2140-6bd3-4490-b467-6874b1541880 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2214.333155] env[61649]: DEBUG oslo.service.loopingcall [None req-a7ff2140-6bd3-4490-b467-6874b1541880 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2214.333378] env[61649]: DEBUG nova.compute.manager [-] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2214.333469] env[61649]: DEBUG nova.network.neutron [-] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2214.369528] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 5980624ebdf14b8bbdd7cfacf7b0cc1b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2214.375576] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5980624ebdf14b8bbdd7cfacf7b0cc1b [ 2214.375932] env[61649]: DEBUG nova.network.neutron [-] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2214.376328] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg f855bb6029834ad0a61f38d24f59951c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2214.383472] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f855bb6029834ad0a61f38d24f59951c [ 2214.383907] env[61649]: INFO nova.compute.manager [-] [instance: 4e47e82d-780e-4c23-8071-083beab2a53f] Took 0.05 seconds to deallocate network for instance. [ 2214.387877] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-a7ff2140-6bd3-4490-b467-6874b1541880 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg cb1bb615dc0847659640658d49b53b8c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2214.414107] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cb1bb615dc0847659640658d49b53b8c [ 2214.428499] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-a7ff2140-6bd3-4490-b467-6874b1541880 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg 5c3d723c2dd14669b013487b46ea9c74 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2214.464301] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5c3d723c2dd14669b013487b46ea9c74 [ 2214.466991] env[61649]: DEBUG oslo_concurrency.lockutils [None req-a7ff2140-6bd3-4490-b467-6874b1541880 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Lock "4e47e82d-780e-4c23-8071-083beab2a53f" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.177s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2214.467317] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-a7ff2140-6bd3-4490-b467-6874b1541880 tempest-ImagesTestJSON-1732047265 tempest-ImagesTestJSON-1732047265-project-member] Expecting reply to msg 93e589291847409c8fa00d43de4a46da in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2214.476867] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 93e589291847409c8fa00d43de4a46da [ 2220.929263] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61649) run_periodic_tasks
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2220.929563] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61649) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 2221.929679] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2223.929610] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2227.929457] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2228.924682] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2229.929056] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2229.929343] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2229.929542] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 2ec5b4e1d8394ee7a836390f4efebb28 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2229.941704] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2ec5b4e1d8394ee7a836390f4efebb28 [ 2229.942273] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2229.942500] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2229.942669] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2229.942827] env[61649]: 
DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61649) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2229.944600] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-059d1899-e724-4af8-ba10-c47df439f905 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2229.953087] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aeb25ec-4ec1-4371-9b46-9f6bc5d9269f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2229.966969] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb6dc1b8-5277-43e8-a669-c9930f00b950 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2229.972995] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3567322-23c4-4517-af72-934e5d7cc940 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.002064] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181838MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61649) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2230.002211] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2230.002404] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2230.003212] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg b85765a92ca74bebba630493474e6f46 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2230.032760] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b85765a92ca74bebba630493474e6f46 [ 2230.036148] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 7647729144264387a87f4a5225863782 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2230.047282] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7647729144264387a87f4a5225863782 [ 2230.060617] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 8b1cd843-48b0-4e85-93fa-32ddd6e32883 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2230.060800] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance d8503feb-d1df-4e1f-8357-e080e8bdb174 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2230.060952] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance f0e69971-df47-4ef0-85c9-ac686e4a4f9d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2230.061081] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 4d429147-d3fe-4d99-af2a-e28a3829f434 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2230.061203] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 8295f484-2065-4a21-bdec-7d38e98f93e7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2230.061419] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance da4cbfc3-cf43-4cf6-b391-d7183699e58d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2230.061546] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 56652181-0379-4532-9b2a-e6138cbd73ad actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2230.061663] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 09dcd3bd-1baa-4276-b8c5-64de3de036f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2230.061843] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2230.061979] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1536MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2230.154386] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1420bba6-1ffb-47ab-bbf8-71d08ae9ae65 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.161627] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b7afcd2-d0fb-44f1-b96a-cac924060d61 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.191718] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4dfca20-7fcc-461a-ae45-d5550961e9db {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.198332] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b11ae4e-c7d4-4c1a-9514-fa918d50bf0c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.210862] env[61649]: DEBUG nova.compute.provider_tree [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2230.211307] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 0b93acf63ee248d7975ecdb25f7b42b0 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2230.218267] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0b93acf63ee248d7975ecdb25f7b42b0 [ 2230.219104] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2230.221423] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 63e212dbb71344fbb3b573e1337177d5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2230.231632] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 63e212dbb71344fbb3b573e1337177d5 [ 
2230.232296] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61649) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2230.232472] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.230s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2232.232495] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2232.232872] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Starting heal instance info cache {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 2232.232872] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Rebuilding the list of instances to heal {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 2232.233351] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg d012e6cb89ca4eceb07c4675cc295f15 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2232.248921] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d012e6cb89ca4eceb07c4675cc295f15 [ 2232.250915] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2232.251066] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2232.251199] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2232.251327] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2232.251449] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Skipping network cache update for instance because it is Building. 
{{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2232.251572] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2232.251693] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2232.251815] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2232.252071] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Didn't find any instances for network info cache update. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 2232.252510] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2237.945481] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2237.946069] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 813ac65730534825aca04267a5938872 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2237.961679] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 813ac65730534825aca04267a5938872 [ 2254.187676] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-3b75c89f-54f7-4b36-967d-8edd88b1295f tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 4ffbe0c4c4e54024b29868de4155c02f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2254.196125] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4ffbe0c4c4e54024b29868de4155c02f [ 2254.196835] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3b75c89f-54f7-4b36-967d-8edd88b1295f tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquiring lock "da4cbfc3-cf43-4cf6-b391-d7183699e58d" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2260.386285] env[61649]: WARNING oslo_vmware.rw_handles [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2260.386285] env[61649]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2260.386285]
env[61649]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2260.386285] env[61649]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2260.386285] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2260.386285] env[61649]: ERROR oslo_vmware.rw_handles response.begin() [ 2260.386285] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2260.386285] env[61649]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2260.386285] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2260.386285] env[61649]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2260.386285] env[61649]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2260.386285] env[61649]: ERROR oslo_vmware.rw_handles [ 2260.386285] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Downloaded image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to vmware_temp/fc40c0bf-2419-4963-b946-afa41f2092f6/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2260.388169] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Caching image {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2260.388413] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Copying Virtual Disk [datastore1] vmware_temp/fc40c0bf-2419-4963-b946-afa41f2092f6/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk to [datastore1] vmware_temp/fc40c0bf-2419-4963-b946-afa41f2092f6/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk {{(pid=61649) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2260.388723] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c3ac635c-ee7c-457e-909a-1886fc3220e1 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.397972] env[61649]: DEBUG oslo_vmware.api [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Waiting for the task: (returnval){ [ 2260.397972] env[61649]: value = "task-158315" [ 2260.397972] env[61649]: _type = "Task" [ 2260.397972] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2260.405467] env[61649]: DEBUG oslo_vmware.api [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Task: {'id': task-158315, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2260.908203] env[61649]: DEBUG oslo_vmware.exceptions [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Fault InvalidArgument not matched. {{(pid=61649) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2260.908482] env[61649]: DEBUG oslo_concurrency.lockutils [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2260.909035] env[61649]: ERROR nova.compute.manager [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2260.909035] env[61649]: Faults: ['InvalidArgument'] [ 2260.909035] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Traceback (most recent call last): [ 2260.909035] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2260.909035] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] yield resources [ 2260.909035] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2260.909035] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] self.driver.spawn(context, instance, image_meta, [ 2260.909035] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2260.909035] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2260.909035] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2260.909035] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] self._fetch_image_if_missing(context, vi) [ 2260.909035] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2260.909339] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] image_cache(vi, tmp_image_ds_loc) [ 2260.909339] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2260.909339] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] vm_util.copy_virtual_disk( [ 2260.909339] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in 
copy_virtual_disk [ 2260.909339] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] session._wait_for_task(vmdk_copy_task) [ 2260.909339] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2260.909339] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] return self.wait_for_task(task_ref) [ 2260.909339] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2260.909339] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] return evt.wait() [ 2260.909339] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2260.909339] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] result = hub.switch() [ 2260.909339] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2260.909339] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] return self.greenlet.switch() [ 2260.909643] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2260.909643] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] self.f(*self.args, **self.kw) [ 2260.909643] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2260.909643] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] raise exceptions.translate_fault(task_info.error) [ 2260.909643] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2260.909643] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Faults: ['InvalidArgument'] [ 2260.909643] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] [ 2260.909643] env[61649]: INFO nova.compute.manager [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Terminating instance [ 2260.910924] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2260.911145] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Creating directory with path [datastore1] 
devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2260.911380] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1be78b37-b5a7-4f99-a8d0-6e8f8627105f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.913610] env[61649]: DEBUG nova.compute.manager [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2260.913805] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2260.914484] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-705b4cb6-a35b-46ec-86d0-12547313376d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.920914] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Unregistering the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2260.921114] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0c6167d9-ce48-47f7-95e3-38d22a3a3292 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.923140] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2260.923296] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61649) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2260.924231] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5158f1db-b4b6-4d26-af16-006c36f98ef1 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.929170] env[61649]: DEBUG oslo_vmware.api [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Waiting for the task: (returnval){ [ 2260.929170] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52e334e3-775b-bba6-158c-28d88842a663" [ 2260.929170] env[61649]: _type = "Task" [ 2260.929170] env[61649]: } to complete. 
{{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2260.935888] env[61649]: DEBUG oslo_vmware.api [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52e334e3-775b-bba6-158c-28d88842a663, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2260.987968] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Unregistered the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2260.988226] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Deleting contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2260.988411] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Deleting the datastore file [datastore1] 8b1cd843-48b0-4e85-93fa-32ddd6e32883 {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2260.988667] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f544528e-3b33-49be-9306-e90b615eed2e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.995228] env[61649]: DEBUG oslo_vmware.api [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Waiting for the task: (returnval){ [ 2260.995228] env[61649]: value = "task-158317" [ 2260.995228] env[61649]: _type = "Task" [ 2260.995228] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2261.002750] env[61649]: DEBUG oslo_vmware.api [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Task: {'id': task-158317, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2261.439261] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Preparing fetch location {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2261.439608] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Creating directory with path [datastore1] vmware_temp/01565041-b701-40b0-a2cf-37cc66e9c708/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2261.439700] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4e7c6d55-5121-4cf6-95e5-bc9e25f9bc55 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2261.450689] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Created directory with path [datastore1] vmware_temp/01565041-b701-40b0-a2cf-37cc66e9c708/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2261.450936] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Fetch image to [datastore1] vmware_temp/01565041-b701-40b0-a2cf-37cc66e9c708/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2261.451067] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to [datastore1] vmware_temp/01565041-b701-40b0-a2cf-37cc66e9c708/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2261.451744] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9130ab35-fdf1-42ab-b70e-5c17bfeb6628 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2261.457921] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a64b81e-547e-4770-a1fe-15af163c99b1 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2261.466235] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-001cc424-7d4b-4402-8b97-ddffb84cb327 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2261.495812] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d7e87ad-fce7-4dd7-8194-c032d9a01a29 {{(pid=61649) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2261.505816] env[61649]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-3cb26d3e-cfb7-446d-989f-f9b99c0f64a4 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2261.507379] env[61649]: DEBUG oslo_vmware.api [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Task: {'id': task-158317, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078008} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2261.507601] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Deleted the datastore file {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2261.507808] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Deleted contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2261.507986] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2261.508172] env[61649]: INFO nova.compute.manager [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Took 0.59 seconds to destroy the instance on the hypervisor. 
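While the failed 8b1cd843 build is being cleaned up above, a second request (req-9de0c698) re-runs the cache-miss path for the same image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11: it takes the per-image lock on the cached vmdk, recreates the vmware_temp staging directory, streams the 21318656-byte tmp-sparse.vmdk to the datastore over an HTTP write handle, and will then attempt the same CopyVirtualDisk_Task. A hedged sketch of that serialize-then-fetch shape follows; exists(), download() and copy_disk() are hypothetical callables standing in for the datastore search, the rw_handles upload and the disk copy, so this is an outline of the pattern rather than Nova's actual _fetch_image_if_missing.

    from oslo_concurrency import lockutils

    def fetch_image_if_missing(cache_vmdk_path, exists, download, copy_disk):
        # Serialize on the cached disk path so only one worker downloads the
        # image while concurrent spawns wait on the lock (the "Acquired lock
        # '[datastore1] devstack-image-cache_base/...'" lines show this lock
        # being taken and released in the log).
        @lockutils.synchronized(cache_vmdk_path)
        def _do():
            if not exists(cache_vmdk_path):         # hypothetical helper
                tmp = download()                    # HTTP write-handle upload
                copy_disk(tmp, cache_vmdk_path)     # CopyVirtualDisk_Task; the
                                                    # step failing on fileType
        _do()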
[ 2261.510174] env[61649]: DEBUG nova.compute.claims [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Aborting claim: {{(pid=61649) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 2261.510366] env[61649]: DEBUG oslo_concurrency.lockutils [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2261.510586] env[61649]: DEBUG oslo_concurrency.lockutils [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2261.512397] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 519d385b790043ffb141f63669a7500d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2261.528188] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2261.546453] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 519d385b790043ffb141f63669a7500d [ 2261.579768] env[61649]: DEBUG oslo_vmware.rw_handles [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/01565041-b701-40b0-a2cf-37cc66e9c708/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2261.642056] env[61649]: DEBUG oslo_vmware.rw_handles [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Completed reading data from the image iterator. {{(pid=61649) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2261.642460] env[61649]: DEBUG oslo_vmware.rw_handles [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/01565041-b701-40b0-a2cf-37cc66e9c708/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61649) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2261.702104] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-126754be-3b15-4f8e-95f6-0108911a49d8 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2261.709228] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d57dd3be-0677-4015-8429-aaa397fc72e1 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2261.738064] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3d9b012-0070-41c0-9ec9-591bc4004f01 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2261.744424] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14732a08-baf6-432a-a80b-0ccff27c1ed3 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2261.756653] env[61649]: DEBUG nova.compute.provider_tree [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2261.757119] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 721240781bc5438cbdb4cd006a146888 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2261.764371] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 721240781bc5438cbdb4cd006a146888 [ 2261.765209] env[61649]: DEBUG nova.scheduler.client.report [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2261.767325] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg b868c93fa1cc450bb800bf43b4fcd441 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2261.777198] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b868c93fa1cc450bb800bf43b4fcd441 [ 2261.777826] env[61649]: DEBUG oslo_concurrency.lockutils [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.267s {{(pid=61649) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2261.778337] env[61649]: ERROR nova.compute.manager [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2261.778337] env[61649]: Faults: ['InvalidArgument']
[ 2261.778337] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Traceback (most recent call last):
[ 2261.778337] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 2261.778337] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] self.driver.spawn(context, instance, image_meta,
[ 2261.778337] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 2261.778337] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 2261.778337] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 2261.778337] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] self._fetch_image_if_missing(context, vi)
[ 2261.778337] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 2261.778337] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] image_cache(vi, tmp_image_ds_loc)
[ 2261.778337] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 2261.778614] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] vm_util.copy_virtual_disk(
[ 2261.778614] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 2261.778614] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] session._wait_for_task(vmdk_copy_task)
[ 2261.778614] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 2261.778614] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] return self.wait_for_task(task_ref)
[ 2261.778614] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 2261.778614] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] return evt.wait()
[ 2261.778614] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 2261.778614] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] result = hub.switch()
[ 2261.778614] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 2261.778614] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] return self.greenlet.switch()
[ 2261.778614] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 2261.778614] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] self.f(*self.args, **self.kw)
[ 2261.778900] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 2261.778900] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] raise exceptions.translate_fault(task_info.error)
[ 2261.778900] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2261.778900] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Faults: ['InvalidArgument']
[ 2261.778900] env[61649]: ERROR nova.compute.manager [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883]
[ 2261.779013] env[61649]: DEBUG nova.compute.utils [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] VimFaultException {{(pid=61649) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 2261.780326] env[61649]: DEBUG nova.compute.manager [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Build of instance 8b1cd843-48b0-4e85-93fa-32ddd6e32883 was re-scheduled: A specified parameter was not correct: fileType
[ 2261.780326] env[61649]: Faults: ['InvalidArgument'] {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 2261.780699] env[61649]: DEBUG nova.compute.manager [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Unplugging VIFs for instance {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 2261.780873] env[61649]: DEBUG nova.compute.manager [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}}
[ 2261.781119] env[61649]: DEBUG nova.compute.manager [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 2261.781302] env[61649]: DEBUG nova.network.neutron [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}}
[ 2261.986220] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg d1f63fdcddd941fd88941c7835b645b9 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2261.993873] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d1f63fdcddd941fd88941c7835b645b9
[ 2261.995870] env[61649]: DEBUG nova.network.neutron [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2261.995870] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 3f65c9f17a1f4d78b8eac66086a85a69 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2262.003810] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3f65c9f17a1f4d78b8eac66086a85a69
[ 2262.004093] env[61649]: INFO nova.compute.manager [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Took 0.22 seconds to deallocate network for instance.
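Annotation: read together, the entries above trace Nova's build-failure recovery path. The InvalidArgument fault on fileType surfaces from task polling inside driver.spawn, the compute manager aborts the resource claim, skips VIF unplugging (the VMware driver does not implement unplug_vifs), deallocates networking, and re-schedules the build rather than failing it outright. A condensed, hypothetical sketch of that control flow; the helper names are stand-ins for the real logic in nova/compute/manager.py's _do_build_and_run_instance:

    class VimFault(Exception):
        """Stand-in for oslo_vmware.exceptions.VimFaultException."""

        def __init__(self, msg, fault_list):
            super().__init__(msg)
            self.fault_list = fault_list  # e.g. ['InvalidArgument']

    def build_instance(instance, spawn, abort_claim, deallocate_network,
                       reschedule):
        """Hypothetical condensation of the failure path seen in the log."""
        try:
            # driver.spawn -> _fetch_image_if_missing -> copy_virtual_disk
            spawn(instance)
        except VimFault as fault:
            abort_claim(instance)          # "Aborting claim" under compute_resources
            deallocate_network(instance)   # "Deallocating network for instance"
            reschedule(instance, reason=str(fault))  # "Build ... was re-scheduled"

The re-schedule is why the same instance UUID reappears below: the build lock is released only after this cleanup (held 595.355s), at which point a queued terminate request takes over.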
[ 2262.005699] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 30e94f940dcf482e8e984dea5e50fdb3 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2262.036038] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 30e94f940dcf482e8e984dea5e50fdb3 [ 2262.038112] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 138b255167d34f349094594e9f8a93ab in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2262.067172] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 138b255167d34f349094594e9f8a93ab [ 2262.087550] env[61649]: INFO nova.scheduler.client.report [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Deleted allocations for instance 8b1cd843-48b0-4e85-93fa-32ddd6e32883 [ 2262.093843] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 0ef77cbe93bd44bbb3524d8448946b6f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2262.104601] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0ef77cbe93bd44bbb3524d8448946b6f [ 2262.105146] env[61649]: DEBUG oslo_concurrency.lockutils [None req-955d8633-2c1e-486f-951d-de28f8d8a9b7 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Lock "8b1cd843-48b0-4e85-93fa-32ddd6e32883" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 595.355s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2262.105362] env[61649]: DEBUG oslo_concurrency.lockutils [None req-291e190b-9ba3-417e-88ac-a5ac01d0c987 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Lock "8b1cd843-48b0-4e85-93fa-32ddd6e32883" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 399.166s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2262.105578] env[61649]: DEBUG oslo_concurrency.lockutils [None req-291e190b-9ba3-417e-88ac-a5ac01d0c987 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquiring lock "8b1cd843-48b0-4e85-93fa-32ddd6e32883-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2262.105775] env[61649]: DEBUG oslo_concurrency.lockutils [None req-291e190b-9ba3-417e-88ac-a5ac01d0c987 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Lock "8b1cd843-48b0-4e85-93fa-32ddd6e32883-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2262.105934] env[61649]: DEBUG oslo_concurrency.lockutils [None req-291e190b-9ba3-417e-88ac-a5ac01d0c987 tempest-ServersTestJSON-1529792186 
tempest-ServersTestJSON-1529792186-project-member] Lock "8b1cd843-48b0-4e85-93fa-32ddd6e32883-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2262.107870] env[61649]: INFO nova.compute.manager [None req-291e190b-9ba3-417e-88ac-a5ac01d0c987 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Terminating instance [ 2262.109823] env[61649]: DEBUG nova.compute.manager [None req-291e190b-9ba3-417e-88ac-a5ac01d0c987 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2262.109823] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-291e190b-9ba3-417e-88ac-a5ac01d0c987 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2262.110492] env[61649]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3dca5b5a-0b51-4ac3-be59-293740791efd {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2262.120304] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c78cccf-6805-4f8f-ad5e-77c1250d1d13 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2262.148042] env[61649]: WARNING nova.virt.vmwareapi.vmops [None req-291e190b-9ba3-417e-88ac-a5ac01d0c987 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8b1cd843-48b0-4e85-93fa-32ddd6e32883 could not be found. [ 2262.148042] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-291e190b-9ba3-417e-88ac-a5ac01d0c987 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2262.148042] env[61649]: INFO nova.compute.manager [None req-291e190b-9ba3-417e-88ac-a5ac01d0c987 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2262.148042] env[61649]: DEBUG oslo.service.loopingcall [None req-291e190b-9ba3-417e-88ac-a5ac01d0c987 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2262.148398] env[61649]: DEBUG nova.compute.manager [-] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2262.148454] env[61649]: DEBUG nova.network.neutron [-] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2262.167774] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 898e645d7fe34a39ac7e115c09132b30 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2262.173481] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 898e645d7fe34a39ac7e115c09132b30 [ 2262.173876] env[61649]: DEBUG nova.network.neutron [-] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2262.174263] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg ccc35d7fed60429eb5a4caaef2aff44b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2262.182243] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ccc35d7fed60429eb5a4caaef2aff44b [ 2262.182810] env[61649]: INFO nova.compute.manager [-] [instance: 8b1cd843-48b0-4e85-93fa-32ddd6e32883] Took 0.03 seconds to deallocate network for instance. [ 2262.186287] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-291e190b-9ba3-417e-88ac-a5ac01d0c987 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 9a1163ddceb44ae9803877bdb84e019b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2262.215259] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9a1163ddceb44ae9803877bdb84e019b [ 2262.231532] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-291e190b-9ba3-417e-88ac-a5ac01d0c987 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 7c8760caaaf24cc981338be16c26cc88 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2262.267346] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7c8760caaaf24cc981338be16c26cc88 [ 2262.270645] env[61649]: DEBUG oslo_concurrency.lockutils [None req-291e190b-9ba3-417e-88ac-a5ac01d0c987 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Lock "8b1cd843-48b0-4e85-93fa-32ddd6e32883" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.165s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2262.270989] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-291e190b-9ba3-417e-88ac-a5ac01d0c987 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg b4604e593a154b8a9f1d3f5e74dd053c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2262.280622] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b4604e593a154b8a9f1d3f5e74dd053c [ 2281.929697] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61649) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2281.930099] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2281.930141] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61649) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 2284.931356] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2288.924634] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2289.929897] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2289.930190] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2290.929667] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2290.930114] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 2f82ba4d8d5c4dcd98fdf110446d6a41 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2290.939228] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2f82ba4d8d5c4dcd98fdf110446d6a41 [ 2290.940269] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2290.940485] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2290.940650] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2290.940816] env[61649]: 
DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61649) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2290.941905] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58a481fb-dea1-4ab4-ba38-1d5879f52653 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2290.951307] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a088c5f-4e30-4c42-91ef-44f030793e84 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2290.965886] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6a253ac-1715-4a34-ba20-0dc5ebf5fd98 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2290.971786] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc80d058-f98b-4105-805a-aafc950ce221 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2290.999333] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181842MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61649) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2290.999467] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2290.999653] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2291.000483] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg f0cf8cec600443629cffe53ddd7fa414 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2291.026084] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f0cf8cec600443629cffe53ddd7fa414 [ 2291.029057] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 28f7992984334f2b9b773428b9c8a977 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2291.036886] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 28f7992984334f2b9b773428b9c8a977 [ 2291.054052] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance d8503feb-d1df-4e1f-8357-e080e8bdb174 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2291.054052] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance f0e69971-df47-4ef0-85c9-ac686e4a4f9d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2291.054052] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 4d429147-d3fe-4d99-af2a-e28a3829f434 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2291.054052] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 8295f484-2065-4a21-bdec-7d38e98f93e7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2291.054229] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance da4cbfc3-cf43-4cf6-b391-d7183699e58d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2291.054229] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 56652181-0379-4532-9b2a-e6138cbd73ad actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2291.054229] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 09dcd3bd-1baa-4276-b8c5-64de3de036f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2291.054229] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2291.054336] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1408MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2291.142415] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00a274f8-9ba9-45e2-a87d-ff000b369e5b {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2291.149909] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b28bcb95-23f2-49f4-9942-50b9af5ce587 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2291.180030] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f15a16d-d6eb-4385-ab89-4b991d26269a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2291.187064] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe43f961-2e83-4354-b4c9-9b9b5ec9c9b8 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2291.200033] env[61649]: DEBUG nova.compute.provider_tree [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2291.200499] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 1369b7cb7a7f4789b07f25eea1a96ede in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2291.207590] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1369b7cb7a7f4789b07f25eea1a96ede [ 2291.208467] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2291.210630] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 1d5b67398a1943c0b51ad03df8b73360 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2291.220967] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1d5b67398a1943c0b51ad03df8b73360 [ 
2291.221599] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61649) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2291.221775] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.222s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2292.222069] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2292.222373] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Starting heal instance info cache {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 2292.222420] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Rebuilding the list of instances to heal {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 2292.223013] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 725b9054b0f94507a9217badc0395bb9 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2292.237321] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 725b9054b0f94507a9217badc0395bb9 [ 2292.239173] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2292.239310] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2292.239438] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2292.239559] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2292.239677] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Skipping network cache update for instance because it is Building. 
{{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2292.239830] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2292.239957] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2292.240086] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Didn't find any instances for network info cache update. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 2292.240567] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2298.128413] env[61649]: DEBUG oslo_concurrency.lockutils [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Acquiring lock "ed66fa83-b203-4c7a-b1e5-d00547fa46c9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2298.128721] env[61649]: DEBUG oslo_concurrency.lockutils [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Lock "ed66fa83-b203-4c7a-b1e5-d00547fa46c9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2298.129221] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Expecting reply to msg 5bfee2b93fbe49d38a806d914caa7b40 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2298.137459] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5bfee2b93fbe49d38a806d914caa7b40 [ 2298.137965] env[61649]: DEBUG nova.compute.manager [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Starting instance... 
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2298.139649] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Expecting reply to msg 163c908e272b4af1ae6a2d0b5fa00a65 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2298.170311] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 163c908e272b4af1ae6a2d0b5fa00a65 [ 2298.185393] env[61649]: DEBUG oslo_concurrency.lockutils [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2298.185493] env[61649]: DEBUG oslo_concurrency.lockutils [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2298.186937] env[61649]: INFO nova.compute.claims [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2298.188760] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Expecting reply to msg 54f2010d76084f2e9c69db609d33b956 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2298.221615] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 54f2010d76084f2e9c69db609d33b956 [ 2298.223263] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Expecting reply to msg f8260a1753574e68b04bb04c2a16969d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2298.230370] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f8260a1753574e68b04bb04c2a16969d [ 2298.329349] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d36e645-d8c9-435d-afe0-60a89f3c5df3 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2298.337039] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8846bd3-4581-4ba7-8840-82c7086ec0b7 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2298.367098] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e89796c9-0b33-410d-a7d4-6a0bfc77b29d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2298.373663] env[61649]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e118a1f-8f6f-40e3-bcd4-10648e5d5edd {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2298.385927] env[61649]: DEBUG nova.compute.provider_tree [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2298.386390] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Expecting reply to msg d0a43776b28e4fada37db0e452be2e5d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2298.393821] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d0a43776b28e4fada37db0e452be2e5d [ 2298.394638] env[61649]: DEBUG nova.scheduler.client.report [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2298.396793] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Expecting reply to msg 7f234ff24b7e4b51b4f19be7839039fd in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2298.406619] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7f234ff24b7e4b51b4f19be7839039fd [ 2298.407231] env[61649]: DEBUG oslo_concurrency.lockutils [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.222s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2298.407705] env[61649]: DEBUG nova.compute.manager [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Start building networks asynchronously for instance. 
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2298.409455] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Expecting reply to msg 998b8356c127453aa21ed90b9d4134d9 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2298.439130] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 998b8356c127453aa21ed90b9d4134d9 [ 2298.440610] env[61649]: DEBUG nova.compute.utils [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Using /dev/sd instead of None {{(pid=61649) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2298.441170] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Expecting reply to msg 1907baee6f224d5f87f514ca43356f2f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2298.442083] env[61649]: DEBUG nova.compute.manager [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Allocating IP information in the background. {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2298.442252] env[61649]: DEBUG nova.network.neutron [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] allocate_for_instance() {{(pid=61649) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2298.450845] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1907baee6f224d5f87f514ca43356f2f [ 2298.451425] env[61649]: DEBUG nova.compute.manager [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Start building block device mappings for instance. 
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2298.453092] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Expecting reply to msg 3fef59abf0a84bb6825f0a57f1fbee9a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2298.480611] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3fef59abf0a84bb6825f0a57f1fbee9a [ 2298.483357] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Expecting reply to msg c7c5db90bc094158a42e05c47962d8ea in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2298.485482] env[61649]: DEBUG nova.policy [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '47b5978abdfb4e288e317fa53fb7a54b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'be725bf46ff647018ed76001b586f633', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61649) authorize /opt/stack/nova/nova/policy.py:203}} [ 2298.511558] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c7c5db90bc094158a42e05c47962d8ea [ 2298.512688] env[61649]: DEBUG nova.compute.manager [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Start spawning the instance on the hypervisor. 
{{(pid=61649) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2298.532948] env[61649]: DEBUG nova.virt.hardware [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-04-02T10:03:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-04-02T10:03:42Z,direct_url=,disk_format='vmdk',id=d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d9c1bd4c77004c3cb8e42232cad1896c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-04-02T10:03:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2298.533178] env[61649]: DEBUG nova.virt.hardware [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Flavor limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2298.533340] env[61649]: DEBUG nova.virt.hardware [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Image limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2298.533521] env[61649]: DEBUG nova.virt.hardware [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Flavor pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2298.533670] env[61649]: DEBUG nova.virt.hardware [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Image pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2298.533818] env[61649]: DEBUG nova.virt.hardware [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2298.534021] env[61649]: DEBUG nova.virt.hardware [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2298.534184] env[61649]: DEBUG nova.virt.hardware [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61649) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 2298.534392] env[61649]: DEBUG nova.virt.hardware [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Got 1 possible topologies {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2298.534673] env[61649]: DEBUG nova.virt.hardware [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2298.534827] env[61649]: DEBUG nova.virt.hardware [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2298.535661] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-595230fe-6642-4e5c-b88d-0f943d7ec161 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2298.543609] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-451ca757-3131-408e-9b86-578d203e015c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2298.824084] env[61649]: DEBUG nova.network.neutron [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Successfully created port: ff44b4d1-3f73-4988-b517-a957f502f17d {{(pid=61649) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2299.330026] env[61649]: DEBUG nova.compute.manager [req-ebf1d7a2-56ba-4fc8-b86c-b2c4e4e7c7b4 req-ff80ae4f-1e88-43c3-b3f8-e395bb66ef34 service nova] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Received event network-vif-plugged-ff44b4d1-3f73-4988-b517-a957f502f17d {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 2299.330303] env[61649]: DEBUG oslo_concurrency.lockutils [req-ebf1d7a2-56ba-4fc8-b86c-b2c4e4e7c7b4 req-ff80ae4f-1e88-43c3-b3f8-e395bb66ef34 service nova] Acquiring lock "ed66fa83-b203-4c7a-b1e5-d00547fa46c9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2299.330461] env[61649]: DEBUG oslo_concurrency.lockutils [req-ebf1d7a2-56ba-4fc8-b86c-b2c4e4e7c7b4 req-ff80ae4f-1e88-43c3-b3f8-e395bb66ef34 service nova] Lock "ed66fa83-b203-4c7a-b1e5-d00547fa46c9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2299.330624] env[61649]: DEBUG oslo_concurrency.lockutils [req-ebf1d7a2-56ba-4fc8-b86c-b2c4e4e7c7b4 req-ff80ae4f-1e88-43c3-b3f8-e395bb66ef34 service nova] Lock "ed66fa83-b203-4c7a-b1e5-d00547fa46c9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61649) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2299.330820] env[61649]: DEBUG nova.compute.manager [req-ebf1d7a2-56ba-4fc8-b86c-b2c4e4e7c7b4 req-ff80ae4f-1e88-43c3-b3f8-e395bb66ef34 service nova] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] No waiting events found dispatching network-vif-plugged-ff44b4d1-3f73-4988-b517-a957f502f17d {{(pid=61649) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2299.330997] env[61649]: WARNING nova.compute.manager [req-ebf1d7a2-56ba-4fc8-b86c-b2c4e4e7c7b4 req-ff80ae4f-1e88-43c3-b3f8-e395bb66ef34 service nova] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Received unexpected event network-vif-plugged-ff44b4d1-3f73-4988-b517-a957f502f17d for instance with vm_state building and task_state spawning. [ 2299.346427] env[61649]: DEBUG nova.network.neutron [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Successfully updated port: ff44b4d1-3f73-4988-b517-a957f502f17d {{(pid=61649) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2299.347498] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Expecting reply to msg dd01b1af60d042629f77c83889a0d83b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2299.353884] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dd01b1af60d042629f77c83889a0d83b [ 2299.354475] env[61649]: DEBUG oslo_concurrency.lockutils [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Acquiring lock "refresh_cache-ed66fa83-b203-4c7a-b1e5-d00547fa46c9" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2299.354609] env[61649]: DEBUG oslo_concurrency.lockutils [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Acquired lock "refresh_cache-ed66fa83-b203-4c7a-b1e5-d00547fa46c9" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2299.354751] env[61649]: DEBUG nova.network.neutron [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Building network info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 2299.355127] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Expecting reply to msg e74e45a92b824c068fd7d9da81a880dc in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2299.361902] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e74e45a92b824c068fd7d9da81a880dc [ 2299.400069] env[61649]: DEBUG nova.network.neutron [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Instance cache 
missing network info. {{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 2299.529262] env[61649]: DEBUG nova.network.neutron [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Updating instance_info_cache with network_info: [{"id": "ff44b4d1-3f73-4988-b517-a957f502f17d", "address": "fa:16:3e:02:55:b0", "network": {"id": "9c11ca0c-6674-4818-854b-4843c98e2edb", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1068587008-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be725bf46ff647018ed76001b586f633", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "683a619f-b10d-41a3-8c03-4f69f6c9ce53", "external-id": "nsx-vlan-transportzone-898", "segmentation_id": 898, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff44b4d1-3f", "ovs_interfaceid": "ff44b4d1-3f73-4988-b517-a957f502f17d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2299.529855] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Expecting reply to msg 5d15b799206947929e40c66681d74d03 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2299.541983] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5d15b799206947929e40c66681d74d03 [ 2299.542554] env[61649]: DEBUG oslo_concurrency.lockutils [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Releasing lock "refresh_cache-ed66fa83-b203-4c7a-b1e5-d00547fa46c9" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2299.542829] env[61649]: DEBUG nova.compute.manager [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Instance network_info: |[{"id": "ff44b4d1-3f73-4988-b517-a957f502f17d", "address": "fa:16:3e:02:55:b0", "network": {"id": "9c11ca0c-6674-4818-854b-4843c98e2edb", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1068587008-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be725bf46ff647018ed76001b586f633", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": 
{"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "683a619f-b10d-41a3-8c03-4f69f6c9ce53", "external-id": "nsx-vlan-transportzone-898", "segmentation_id": 898, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff44b4d1-3f", "ovs_interfaceid": "ff44b4d1-3f73-4988-b517-a957f502f17d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2299.543225] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:02:55:b0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '683a619f-b10d-41a3-8c03-4f69f6c9ce53', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ff44b4d1-3f73-4988-b517-a957f502f17d', 'vif_model': 'vmxnet3'}] {{(pid=61649) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2299.550682] env[61649]: DEBUG oslo.service.loopingcall [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2299.551161] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Creating VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2299.551389] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b4d4fc13-843d-49c3-8255-517d49ceb364 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2299.571129] env[61649]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2299.571129] env[61649]: value = "task-158318" [ 2299.571129] env[61649]: _type = "Task" [ 2299.571129] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2299.581050] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158318, 'name': CreateVM_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2300.081441] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158318, 'name': CreateVM_Task, 'duration_secs': 0.318743} completed successfully. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2300.081620] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Created VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2300.082294] env[61649]: DEBUG oslo_concurrency.lockutils [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2300.082453] env[61649]: DEBUG oslo_concurrency.lockutils [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2300.082788] env[61649]: DEBUG oslo_concurrency.lockutils [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2300.083035] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4a276f3-de33-4eeb-9fc0-4e1a86274433 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2300.087746] env[61649]: DEBUG oslo_vmware.api [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Waiting for the task: (returnval){ [ 2300.087746] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]520f7b69-6220-f8a1-94a1-ad7ff7ea5381" [ 2300.087746] env[61649]: _type = "Task" [ 2300.087746] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2300.101519] env[61649]: DEBUG oslo_vmware.api [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]520f7b69-6220-f8a1-94a1-ad7ff7ea5381, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2300.597809] env[61649]: DEBUG oslo_concurrency.lockutils [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2300.598168] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Processing image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2300.598256] env[61649]: DEBUG oslo_concurrency.lockutils [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2301.570065] env[61649]: DEBUG nova.compute.manager [req-e769f4e9-42e0-491a-a72a-1d0587433954 req-2b300a4c-c8b9-4b75-88b9-30dc66a38da1 service nova] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Received event network-changed-ff44b4d1-3f73-4988-b517-a957f502f17d {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 2301.570254] env[61649]: DEBUG nova.compute.manager [req-e769f4e9-42e0-491a-a72a-1d0587433954 req-2b300a4c-c8b9-4b75-88b9-30dc66a38da1 service nova] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Refreshing instance network info cache due to event network-changed-ff44b4d1-3f73-4988-b517-a957f502f17d. 
{{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 2301.570459] env[61649]: DEBUG oslo_concurrency.lockutils [req-e769f4e9-42e0-491a-a72a-1d0587433954 req-2b300a4c-c8b9-4b75-88b9-30dc66a38da1 service nova] Acquiring lock "refresh_cache-ed66fa83-b203-4c7a-b1e5-d00547fa46c9" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2301.570611] env[61649]: DEBUG oslo_concurrency.lockutils [req-e769f4e9-42e0-491a-a72a-1d0587433954 req-2b300a4c-c8b9-4b75-88b9-30dc66a38da1 service nova] Acquired lock "refresh_cache-ed66fa83-b203-4c7a-b1e5-d00547fa46c9" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2301.570766] env[61649]: DEBUG nova.network.neutron [req-e769f4e9-42e0-491a-a72a-1d0587433954 req-2b300a4c-c8b9-4b75-88b9-30dc66a38da1 service nova] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Refreshing network info cache for port ff44b4d1-3f73-4988-b517-a957f502f17d {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 2301.571249] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-e769f4e9-42e0-491a-a72a-1d0587433954 req-2b300a4c-c8b9-4b75-88b9-30dc66a38da1 service nova] Expecting reply to msg 725c7a8d700543d197f764d85c860320 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2301.578119] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 725c7a8d700543d197f764d85c860320 [ 2301.806530] env[61649]: DEBUG nova.network.neutron [req-e769f4e9-42e0-491a-a72a-1d0587433954 req-2b300a4c-c8b9-4b75-88b9-30dc66a38da1 service nova] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Updated VIF entry in instance network info cache for port ff44b4d1-3f73-4988-b517-a957f502f17d. 
{{(pid=61649) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 2301.806882] env[61649]: DEBUG nova.network.neutron [req-e769f4e9-42e0-491a-a72a-1d0587433954 req-2b300a4c-c8b9-4b75-88b9-30dc66a38da1 service nova] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Updating instance_info_cache with network_info: [{"id": "ff44b4d1-3f73-4988-b517-a957f502f17d", "address": "fa:16:3e:02:55:b0", "network": {"id": "9c11ca0c-6674-4818-854b-4843c98e2edb", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1068587008-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be725bf46ff647018ed76001b586f633", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "683a619f-b10d-41a3-8c03-4f69f6c9ce53", "external-id": "nsx-vlan-transportzone-898", "segmentation_id": 898, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff44b4d1-3f", "ovs_interfaceid": "ff44b4d1-3f73-4988-b517-a957f502f17d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2301.807391] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-e769f4e9-42e0-491a-a72a-1d0587433954 req-2b300a4c-c8b9-4b75-88b9-30dc66a38da1 service nova] Expecting reply to msg 50b8dda309cf4856a2836fa929fa27d6 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2301.815827] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 50b8dda309cf4856a2836fa929fa27d6 [ 2301.816385] env[61649]: DEBUG oslo_concurrency.lockutils [req-e769f4e9-42e0-491a-a72a-1d0587433954 req-2b300a4c-c8b9-4b75-88b9-30dc66a38da1 service nova] Releasing lock "refresh_cache-ed66fa83-b203-4c7a-b1e5-d00547fa46c9" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2310.403426] env[61649]: WARNING oslo_vmware.rw_handles [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2310.403426] env[61649]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2310.403426] env[61649]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2310.403426] env[61649]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2310.403426] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2310.403426] env[61649]: ERROR oslo_vmware.rw_handles response.begin() [ 2310.403426] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2310.403426] env[61649]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2310.403426] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 
2310.403426] env[61649]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2310.403426] env[61649]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2310.403426] env[61649]: ERROR oslo_vmware.rw_handles [ 2310.404138] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Downloaded image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to vmware_temp/01565041-b701-40b0-a2cf-37cc66e9c708/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2310.405737] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Caching image {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2310.405987] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Copying Virtual Disk [datastore1] vmware_temp/01565041-b701-40b0-a2cf-37cc66e9c708/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk to [datastore1] vmware_temp/01565041-b701-40b0-a2cf-37cc66e9c708/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk {{(pid=61649) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2310.406261] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-045ff2b3-7ee1-4e73-812d-7f90563f3b46 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.414273] env[61649]: DEBUG oslo_vmware.api [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Waiting for the task: (returnval){ [ 2310.414273] env[61649]: value = "task-158319" [ 2310.414273] env[61649]: _type = "Task" [ 2310.414273] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2310.422726] env[61649]: DEBUG oslo_vmware.api [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Task: {'id': task-158319, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2310.924639] env[61649]: DEBUG oslo_vmware.exceptions [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Fault InvalidArgument not matched. 
{{(pid=61649) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2310.924918] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2310.925481] env[61649]: ERROR nova.compute.manager [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2310.925481] env[61649]: Faults: ['InvalidArgument'] [ 2310.925481] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Traceback (most recent call last): [ 2310.925481] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2310.925481] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] yield resources [ 2310.925481] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2310.925481] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] self.driver.spawn(context, instance, image_meta, [ 2310.925481] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2310.925481] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2310.925481] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2310.925481] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] self._fetch_image_if_missing(context, vi) [ 2310.925481] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2310.925914] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] image_cache(vi, tmp_image_ds_loc) [ 2310.925914] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2310.925914] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] vm_util.copy_virtual_disk( [ 2310.925914] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2310.925914] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] session._wait_for_task(vmdk_copy_task) [ 2310.925914] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 2310.925914] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] return self.wait_for_task(task_ref) [ 2310.925914] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2310.925914] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] return evt.wait() [ 2310.925914] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2310.925914] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] result = hub.switch() [ 2310.925914] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2310.925914] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] return self.greenlet.switch() [ 2310.926266] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2310.926266] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] self.f(*self.args, **self.kw) [ 2310.926266] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2310.926266] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] raise exceptions.translate_fault(task_info.error) [ 2310.926266] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2310.926266] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Faults: ['InvalidArgument'] [ 2310.926266] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] [ 2310.926266] env[61649]: INFO nova.compute.manager [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Terminating instance [ 2310.927366] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2310.927588] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2310.927866] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-172abb8a-84e4-49ee-939f-05d236873975 {{(pid=61649) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.930331] env[61649]: DEBUG nova.compute.manager [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2310.930522] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2310.931261] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2665d310-4d7e-4892-868e-b537bf605408 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.938305] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Unregistering the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2310.938517] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7f40e4e0-8f7e-4861-ae47-ad1787185ed9 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.940678] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2310.940853] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61649) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2310.941779] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3bfd0ca5-d429-4fd5-975b-22cdcd0b8404 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.946784] env[61649]: DEBUG oslo_vmware.api [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Waiting for the task: (returnval){ [ 2310.946784] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52d51d6f-dc8e-f6e6-eb8b-a42b87ff12dd" [ 2310.946784] env[61649]: _type = "Task" [ 2310.946784] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2310.953870] env[61649]: DEBUG oslo_vmware.api [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52d51d6f-dc8e-f6e6-eb8b-a42b87ff12dd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2310.996395] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Unregistered the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2310.996611] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Deleting contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2310.996798] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Deleting the datastore file [datastore1] d8503feb-d1df-4e1f-8357-e080e8bdb174 {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2310.997066] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-784b36da-d4a4-4569-ab7b-651c03270a53 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.002786] env[61649]: DEBUG oslo_vmware.api [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Waiting for the task: (returnval){ [ 2311.002786] env[61649]: value = "task-158321" [ 2311.002786] env[61649]: _type = "Task" [ 2311.002786] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2311.010243] env[61649]: DEBUG oslo_vmware.api [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Task: {'id': task-158321, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2311.458287] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Preparing fetch location {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2311.458586] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Creating directory with path [datastore1] vmware_temp/91cd361e-8007-4be7-a19b-b918872db887/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2311.458793] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-491eec1b-b593-4354-8107-a6b50af8be56 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.471384] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Created directory with path [datastore1] vmware_temp/91cd361e-8007-4be7-a19b-b918872db887/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2311.471384] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Fetch image to [datastore1] vmware_temp/91cd361e-8007-4be7-a19b-b918872db887/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2311.471384] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to [datastore1] vmware_temp/91cd361e-8007-4be7-a19b-b918872db887/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2311.471384] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-881103f8-a5ca-43de-bee6-201cafe839ea {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.477133] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f5d437b-8b21-4cb5-b42f-4e778257bfe2 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.485815] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-045f31d2-87b5-4ecc-ae53-a03eda8235a3 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.519178] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-428c5ecf-361b-44f2-ba04-fd2e9166d829 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.526028] env[61649]: DEBUG oslo_vmware.api [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Task: {'id': task-158321, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.073507} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2311.527450] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Deleted the datastore file {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2311.527637] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Deleted contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2311.527814] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2311.527986] env[61649]: INFO nova.compute.manager [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2311.529757] env[61649]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-239d3df8-cf22-467d-9a03-419baeaa18b2 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.531627] env[61649]: DEBUG nova.compute.claims [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Aborting claim: {{(pid=61649) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 2311.531801] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2311.532016] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2311.533871] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg b4b57f2a52b8457abea7b2105113bc5f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2311.553573] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2311.564959] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b4b57f2a52b8457abea7b2105113bc5f [ 2311.601282] env[61649]: DEBUG oslo_vmware.rw_handles [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/91cd361e-8007-4be7-a19b-b918872db887/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2311.662975] env[61649]: DEBUG oslo_vmware.rw_handles [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Completed reading data from the image iterator. 
{{(pid=61649) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2311.663221] env[61649]: DEBUG oslo_vmware.rw_handles [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/91cd361e-8007-4be7-a19b-b918872db887/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2311.713457] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a673c81-278e-4fe5-908c-b237e159a1b3 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.720769] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b27e629-3785-446a-bff9-4699c1915219 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.750521] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40c804d4-4ef1-4256-abe6-905a3ab53b36 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.757246] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af6cb3ee-9d2b-4c5d-b495-79f6eadd9d34 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.769784] env[61649]: DEBUG nova.compute.provider_tree [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2311.770278] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 0c8cc818b86f424ea97b1e197bd67690 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2311.777901] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0c8cc818b86f424ea97b1e197bd67690 [ 2311.778782] env[61649]: DEBUG nova.scheduler.client.report [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2311.781013] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 
982998fa25394b77914f02c2d51667a0 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2311.791132] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 982998fa25394b77914f02c2d51667a0 [ 2311.791785] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.260s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2311.792330] env[61649]: ERROR nova.compute.manager [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2311.792330] env[61649]: Faults: ['InvalidArgument'] [ 2311.792330] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Traceback (most recent call last): [ 2311.792330] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2311.792330] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] self.driver.spawn(context, instance, image_meta, [ 2311.792330] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2311.792330] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2311.792330] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2311.792330] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] self._fetch_image_if_missing(context, vi) [ 2311.792330] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2311.792330] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] image_cache(vi, tmp_image_ds_loc) [ 2311.792330] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2311.792711] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] vm_util.copy_virtual_disk( [ 2311.792711] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2311.792711] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] session._wait_for_task(vmdk_copy_task) [ 2311.792711] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2311.792711] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] return self.wait_for_task(task_ref) [ 2311.792711] env[61649]: ERROR 
nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2311.792711] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] return evt.wait() [ 2311.792711] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2311.792711] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] result = hub.switch() [ 2311.792711] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2311.792711] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] return self.greenlet.switch() [ 2311.792711] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2311.792711] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] self.f(*self.args, **self.kw) [ 2311.793052] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2311.793052] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] raise exceptions.translate_fault(task_info.error) [ 2311.793052] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2311.793052] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Faults: ['InvalidArgument'] [ 2311.793052] env[61649]: ERROR nova.compute.manager [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] [ 2311.793052] env[61649]: DEBUG nova.compute.utils [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] VimFaultException {{(pid=61649) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2311.794306] env[61649]: DEBUG nova.compute.manager [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Build of instance d8503feb-d1df-4e1f-8357-e080e8bdb174 was re-scheduled: A specified parameter was not correct: fileType [ 2311.794306] env[61649]: Faults: ['InvalidArgument'] {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2311.794678] env[61649]: DEBUG nova.compute.manager [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Unplugging VIFs for instance {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2311.794851] env[61649]: DEBUG nova.compute.manager [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Virt driver does not provide unplug_vifs method, so 
it is not possible determine if VIFs should be unplugged. {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2311.795021] env[61649]: DEBUG nova.compute.manager [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2311.795187] env[61649]: DEBUG nova.network.neutron [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2312.004057] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 9e1173015f4f41fd9b1e2b3b1fbbf26a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2312.011005] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9e1173015f4f41fd9b1e2b3b1fbbf26a [ 2312.011545] env[61649]: DEBUG nova.network.neutron [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2312.012057] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 703f4aa2992f414d9868008dfb83605a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2312.022855] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 703f4aa2992f414d9868008dfb83605a [ 2312.023028] env[61649]: INFO nova.compute.manager [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Took 0.23 seconds to deallocate network for instance. 
[ 2312.024751] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 4187e85a0f10435a8b8226289eeb5cfb in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2312.057838] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4187e85a0f10435a8b8226289eeb5cfb [ 2312.060983] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 661fb23bf0ec4cd6862845d3c924b7de in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2312.091956] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 661fb23bf0ec4cd6862845d3c924b7de [ 2312.109805] env[61649]: INFO nova.scheduler.client.report [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Deleted allocations for instance d8503feb-d1df-4e1f-8357-e080e8bdb174 [ 2312.115815] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg b87d51ab13e0480dbf0eef835553f1c7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2312.133286] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b87d51ab13e0480dbf0eef835553f1c7 [ 2312.133833] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9de0c698-82a0-47bb-ae44-2c18aaca2ca1 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "d8503feb-d1df-4e1f-8357-e080e8bdb174" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 556.098s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2312.134735] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7874eed7-c264-4dfe-ba18-15b5a0a7b0d6 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "d8503feb-d1df-4e1f-8357-e080e8bdb174" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 359.819s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2312.134975] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7874eed7-c264-4dfe-ba18-15b5a0a7b0d6 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquiring lock "d8503feb-d1df-4e1f-8357-e080e8bdb174-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2312.135185] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7874eed7-c264-4dfe-ba18-15b5a0a7b0d6 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "d8503feb-d1df-4e1f-8357-e080e8bdb174-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2312.135359] env[61649]: DEBUG oslo_concurrency.lockutils [None 
req-7874eed7-c264-4dfe-ba18-15b5a0a7b0d6 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "d8503feb-d1df-4e1f-8357-e080e8bdb174-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2312.137560] env[61649]: INFO nova.compute.manager [None req-7874eed7-c264-4dfe-ba18-15b5a0a7b0d6 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Terminating instance [ 2312.139663] env[61649]: DEBUG nova.compute.manager [None req-7874eed7-c264-4dfe-ba18-15b5a0a7b0d6 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2312.139920] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-7874eed7-c264-4dfe-ba18-15b5a0a7b0d6 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2312.140198] env[61649]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-aba5f6a1-c231-4764-bf83-17dca74609a5 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2312.149582] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9d4e39a-93c1-4bf9-8782-ce5a20c7487a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2312.175143] env[61649]: WARNING nova.virt.vmwareapi.vmops [None req-7874eed7-c264-4dfe-ba18-15b5a0a7b0d6 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d8503feb-d1df-4e1f-8357-e080e8bdb174 could not be found. [ 2312.175337] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-7874eed7-c264-4dfe-ba18-15b5a0a7b0d6 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2312.175515] env[61649]: INFO nova.compute.manager [None req-7874eed7-c264-4dfe-ba18-15b5a0a7b0d6 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2312.175759] env[61649]: DEBUG oslo.service.loopingcall [None req-7874eed7-c264-4dfe-ba18-15b5a0a7b0d6 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2312.176274] env[61649]: DEBUG nova.compute.manager [-] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2312.176364] env[61649]: DEBUG nova.network.neutron [-] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2312.193733] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg a77b9c42e3254eb481e95ef2438240e9 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2312.199887] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a77b9c42e3254eb481e95ef2438240e9 [ 2312.200264] env[61649]: DEBUG nova.network.neutron [-] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2312.200644] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 38062400c574483fb3f496b5c268e2b7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2312.207405] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 38062400c574483fb3f496b5c268e2b7 [ 2312.207833] env[61649]: INFO nova.compute.manager [-] [instance: d8503feb-d1df-4e1f-8357-e080e8bdb174] Took 0.03 seconds to deallocate network for instance. [ 2312.211203] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7874eed7-c264-4dfe-ba18-15b5a0a7b0d6 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 48f225478eb4425a80119aa6a1bd9b5a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2312.234623] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 48f225478eb4425a80119aa6a1bd9b5a [ 2312.250370] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7874eed7-c264-4dfe-ba18-15b5a0a7b0d6 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 508cb861a8f24f5e9afc5529855c232f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2312.284071] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 508cb861a8f24f5e9afc5529855c232f [ 2312.286686] env[61649]: DEBUG oslo_concurrency.lockutils [None req-7874eed7-c264-4dfe-ba18-15b5a0a7b0d6 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "d8503feb-d1df-4e1f-8357-e080e8bdb174" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.153s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2312.287013] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-7874eed7-c264-4dfe-ba18-15b5a0a7b0d6 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 882d7aa9038e4eb6bacfdd3cf8c55442 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2312.297494] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 882d7aa9038e4eb6bacfdd3cf8c55442 [ 2328.000643] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task 
ComputeManager._cleanup_running_deleted_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2328.001168] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Getting list of instances from cluster (obj){ [ 2328.001168] env[61649]: value = "domain-c8" [ 2328.001168] env[61649]: _type = "ClusterComputeResource" [ 2328.001168] env[61649]: } {{(pid=61649) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2328.002232] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0178ced2-477a-475f-a16d-2813e4d9f69e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2328.016704] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Got total of 7 instances {{(pid=61649) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 2328.017274] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg ad713f0d15da411c8cf336584f160cd4 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2328.031597] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ad713f0d15da411c8cf336584f160cd4 [ 2330.993247] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 289462d2655148a68ee320856419f039 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2331.002557] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 289462d2655148a68ee320856419f039 [ 2341.961821] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2341.962189] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61649) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 2341.978734] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._sync_power_states {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2341.979301] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 4baa2cc5944d4d9b85ec04d7760640c1 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2341.994203] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4baa2cc5944d4d9b85ec04d7760640c1 [ 2341.996431] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Getting list of instances from cluster (obj){ [ 2341.996431] env[61649]: value = "domain-c8" [ 2341.996431] env[61649]: _type = "ClusterComputeResource" [ 2341.996431] env[61649]: } {{(pid=61649) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2341.997619] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3616a46d-15cb-44c1-ad15-5f4dcf21d87b {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2342.011992] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Got total of 7 instances {{(pid=61649) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 2342.012231] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Triggering sync for uuid f0e69971-df47-4ef0-85c9-ac686e4a4f9d {{(pid=61649) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 2342.012429] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Triggering sync for uuid 4d429147-d3fe-4d99-af2a-e28a3829f434 {{(pid=61649) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 2342.012589] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Triggering sync for uuid 8295f484-2065-4a21-bdec-7d38e98f93e7 {{(pid=61649) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 2342.012743] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Triggering sync for uuid da4cbfc3-cf43-4cf6-b391-d7183699e58d {{(pid=61649) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 2342.012897] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Triggering sync for uuid 56652181-0379-4532-9b2a-e6138cbd73ad {{(pid=61649) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 2342.013052] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Triggering sync for uuid 09dcd3bd-1baa-4276-b8c5-64de3de036f2 {{(pid=61649) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 2342.013201] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Triggering sync for uuid ed66fa83-b203-4c7a-b1e5-d00547fa46c9 {{(pid=61649) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 2342.013492] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock 
"f0e69971-df47-4ef0-85c9-ac686e4a4f9d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2342.013716] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "4d429147-d3fe-4d99-af2a-e28a3829f434" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2342.013916] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "8295f484-2065-4a21-bdec-7d38e98f93e7" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2342.014113] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "da4cbfc3-cf43-4cf6-b391-d7183699e58d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2342.014307] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "56652181-0379-4532-9b2a-e6138cbd73ad" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2342.014498] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "09dcd3bd-1baa-4276-b8c5-64de3de036f2" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2342.014688] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "ed66fa83-b203-4c7a-b1e5-d00547fa46c9" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2343.966019] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2344.929281] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2345.974775] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-10677e17-bdd7-44e9-8905-ba207d6313a5 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg d6fa12e2e78749bba990d8160eb92c3d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2345.984925] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
d6fa12e2e78749bba990d8160eb92c3d [ 2345.985450] env[61649]: DEBUG oslo_concurrency.lockutils [None req-10677e17-bdd7-44e9-8905-ba207d6313a5 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquiring lock "56652181-0379-4532-9b2a-e6138cbd73ad" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2350.923865] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2350.928515] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2350.928683] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Cleaning up deleted instances with incomplete migration {{(pid=61649) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11255}} [ 2350.929029] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg f3d1b27eaf4649d684230663195f9fef in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2350.938081] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f3d1b27eaf4649d684230663195f9fef [ 2351.939991] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2351.940311] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Starting heal instance info cache {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 2351.940357] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Rebuilding the list of instances to heal {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 2351.940946] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg c573b2071fba4423b7eae2ea0d489d12 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2351.955010] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c573b2071fba4423b7eae2ea0d489d12 [ 2351.956827] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2351.956969] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Skipping network cache update for instance because it is Building. 
{{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2351.957098] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2351.957220] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2351.957335] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2351.957449] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2351.957563] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2351.957678] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Didn't find any instances for network info cache update. 
{{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 2351.958134] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2351.958309] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2351.958457] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2351.958765] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 01af7aa4aff34f798bfde3974e8c304e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2351.967050] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 01af7aa4aff34f798bfde3974e8c304e [ 2351.967928] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2351.968170] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2351.968335] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2351.968495] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61649) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2351.969555] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f0a8f82-9c24-4834-b29f-12841987c898 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2351.978304] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f647b0e-ed37-4cf1-92ac-0814239c05fe {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2351.992051] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de329914-3a25-4d2b-8944-cd4f5a2a46f6 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2351.998153] env[61649]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92673f42-a865-4bb9-8e13-5bcf134d05d0 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2352.027185] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181843MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61649) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2352.027360] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2352.027561] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2352.028422] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg eb54c6c3d5624b81b3f272275c607653 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2352.053424] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eb54c6c3d5624b81b3f272275c607653 [ 2352.056628] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg d1d9f600130c4d47b65ad90a53f382f5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2352.064942] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d1d9f600130c4d47b65ad90a53f382f5 [ 2352.080828] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance f0e69971-df47-4ef0-85c9-ac686e4a4f9d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2352.081016] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 4d429147-d3fe-4d99-af2a-e28a3829f434 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2352.081151] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 8295f484-2065-4a21-bdec-7d38e98f93e7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2352.081275] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance da4cbfc3-cf43-4cf6-b391-d7183699e58d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2352.081393] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 56652181-0379-4532-9b2a-e6138cbd73ad actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2352.081510] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 09dcd3bd-1baa-4276-b8c5-64de3de036f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2352.081627] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance ed66fa83-b203-4c7a-b1e5-d00547fa46c9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2352.081803] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2352.082004] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1408MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2352.170593] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-477dae48-b09d-48f6-858a-ef79850e5503 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2352.178780] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9edc8e6-430c-4c8f-95eb-718a303e9266 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2352.208320] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ce2870a-bac2-491a-84fc-b9d7e8500989 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2352.215077] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d38447b-a65b-4256-80a0-641b64521068 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2352.227456] env[61649]: DEBUG nova.compute.provider_tree [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2352.227907] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 
a448f532851949e5b047a2d634d41985 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2352.235411] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a448f532851949e5b047a2d634d41985
[ 2352.236280] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 2352.238401] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 2c32bc3666d7418b962765052573de93 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2352.250908] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2c32bc3666d7418b962765052573de93
[ 2352.251536] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61649) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 2352.251710] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.224s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2354.222449] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2357.930958] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2357.931327] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 773de68cbf884016bc27af8b86393ef0 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2357.944175] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 773de68cbf884016bc27af8b86393ef0
[ 2358.937763] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2358.938373] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg a27aa7f1f06a457fa463d4625ef0872f in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2358.953106] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a27aa7f1f06a457fa463d4625ef0872f
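The update_available_resource audit recorded above is internally consistent and can be checked by hand: seven instances each hold {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1} in placement, and the MEMORY_MB inventory reserves 512 MB, which together reproduce the final resource view (used_ram=1408MB, used_disk=7GB, used_vcpus=7). A quick sanity check of that arithmetic:

    instances = 7
    per_instance = {"DISK_GB": 1, "MEMORY_MB": 128, "VCPU": 1}
    reserved_mb = 512  # 'reserved' in the MEMORY_MB inventory record above

    used_ram = reserved_mb + instances * per_instance["MEMORY_MB"]  # 512 + 7*128 = 1408 MB
    used_disk = instances * per_instance["DISK_GB"]                 # 7 GB
    used_vcpus = instances * per_instance["VCPU"]                   # 7

    assert (used_ram, used_disk, used_vcpus) == (1408, 7, 7)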
[ 2361.365584] env[61649]: WARNING oslo_vmware.rw_handles [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 2361.365584] env[61649]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 2361.365584] env[61649]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 2361.365584] env[61649]: ERROR oslo_vmware.rw_handles self._conn.getresponse()
[ 2361.365584] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 2361.365584] env[61649]: ERROR oslo_vmware.rw_handles response.begin()
[ 2361.365584] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 2361.365584] env[61649]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status()
[ 2361.365584] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 2361.365584] env[61649]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without"
[ 2361.365584] env[61649]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 2361.365584] env[61649]: ERROR oslo_vmware.rw_handles
[ 2361.366478] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Downloaded image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to vmware_temp/91cd361e-8007-4be7-a19b-b918872db887/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 2361.367918] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Caching image {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 2361.368196] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Copying Virtual Disk [datastore1] vmware_temp/91cd361e-8007-4be7-a19b-b918872db887/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk to [datastore1] vmware_temp/91cd361e-8007-4be7-a19b-b918872db887/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk {{(pid=61649) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 2361.368472] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-89c4d870-08db-43cf-8b21-7623a0da8b1d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2361.376187] env[61649]: DEBUG oslo_vmware.api [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Waiting for the task: (returnval){
[ 2361.376187] env[61649]: value = "task-158322"
[ 2361.376187] env[61649]: _type = "Task"
[ 2361.376187] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2361.384657] env[61649]: DEBUG oslo_vmware.api [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Task: {'id': task-158322, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2361.885706] env[61649]: DEBUG oslo_vmware.exceptions [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Fault InvalidArgument not matched. {{(pid=61649) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 2361.886996] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2361.887611] env[61649]: ERROR nova.compute.manager [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2361.887611] env[61649]: Faults: ['InvalidArgument']
[ 2361.887611] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Traceback (most recent call last):
[ 2361.887611] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources
[ 2361.887611] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] yield resources
[ 2361.887611] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 2361.887611] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] self.driver.spawn(context, instance, image_meta,
[ 2361.887611] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 2361.887611] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 2361.887611] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 2361.887611] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] self._fetch_image_if_missing(context, vi)
[ 2361.887611] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 2361.888177] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] image_cache(vi, tmp_image_ds_loc)
[ 2361.888177] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 2361.888177] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] vm_util.copy_virtual_disk(
[ 2361.888177] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 2361.888177] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] session._wait_for_task(vmdk_copy_task)
[ 2361.888177] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 2361.888177] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] return self.wait_for_task(task_ref)
[ 2361.888177] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 2361.888177] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] return evt.wait()
[ 2361.888177] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 2361.888177] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] result = hub.switch()
[ 2361.888177] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 2361.888177] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] return self.greenlet.switch()
[ 2361.888709] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 2361.888709] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] self.f(*self.args, **self.kw)
[ 2361.888709] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 2361.888709] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] raise exceptions.translate_fault(task_info.error)
[ 2361.888709] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2361.888709] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Faults: ['InvalidArgument']
[ 2361.888709] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d]
[ 2361.888709] env[61649]: INFO nova.compute.manager [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Terminating instance
[ 2361.889580] env[61649]: DEBUG oslo_concurrency.lockutils [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479
tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2361.889822] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2361.890057] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-662162f4-e405-445b-a6e2-d8bbe0257993 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2361.892361] env[61649]: DEBUG nova.compute.manager [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2361.892556] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2361.893298] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b6f6e4f-fa4e-4c79-b8b5-5f7ed1a04c51 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2361.899699] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Unregistering the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2361.899957] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c98b31f2-636a-4b88-ba16-c1cd5a9b5277 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2361.901979] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2361.902154] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61649) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2361.903070] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa312d61-fada-4afc-9cfe-acd200f6b60f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2361.907743] env[61649]: DEBUG oslo_vmware.api [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Waiting for the task: (returnval){ [ 2361.907743] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52126e9c-d4eb-4d22-5e69-db29706c956a" [ 2361.907743] env[61649]: _type = "Task" [ 2361.907743] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2361.920969] env[61649]: DEBUG oslo_vmware.api [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52126e9c-d4eb-4d22-5e69-db29706c956a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2361.965822] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Unregistered the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2361.966124] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Deleting contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2361.966236] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Deleting the datastore file [datastore1] f0e69971-df47-4ef0-85c9-ac686e4a4f9d {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2361.966507] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b6677642-370a-40a8-a8d4-4842f7e18125 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2361.972899] env[61649]: DEBUG oslo_vmware.api [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Waiting for the task: (returnval){ [ 2361.972899] env[61649]: value = "task-158324" [ 2361.972899] env[61649]: _type = "Task" [ 2361.972899] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2361.980550] env[61649]: DEBUG oslo_vmware.api [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Task: {'id': task-158324, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2362.418358] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Preparing fetch location {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2362.418690] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Creating directory with path [datastore1] vmware_temp/d2cf3ccd-d176-45f7-95d0-0ac8e7d970a3/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2362.418815] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dfe482f6-a527-4edf-af59-409e50df78ee {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.429303] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Created directory with path [datastore1] vmware_temp/d2cf3ccd-d176-45f7-95d0-0ac8e7d970a3/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2362.429489] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Fetch image to [datastore1] vmware_temp/d2cf3ccd-d176-45f7-95d0-0ac8e7d970a3/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2362.429658] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to [datastore1] vmware_temp/d2cf3ccd-d176-45f7-95d0-0ac8e7d970a3/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2362.430384] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b76a1207-1886-4430-aa8c-a86d57839ac6 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.436726] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d508e56d-51eb-46f2-a792-03895eb412bf {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.445204] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-546c8157-e79c-498e-8c68-df3d2aab2ba1 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.477162] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-427537c1-395b-4a52-b3b0-e745439b5ecf {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.484919] env[61649]: DEBUG oslo_vmware.api [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Task: {'id': task-158324, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.068168} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2362.486289] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Deleted the datastore file {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2362.486855] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Deleted contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2362.487064] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2362.487247] env[61649]: INFO nova.compute.manager [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Took 0.59 seconds to destroy the instance on the hypervisor. 
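Every vSphere operation in these records follows the same request/poll contract: oslo.vmware submits a *_Task call (SearchDatastore_Task, DeleteDatastoreFile_Task), logs "Waiting for the task ... to complete", and _poll_task then reports progress until the task either succeeds ("completed successfully" with duration_secs) or carries an error. Below is a minimal, self-contained sketch of that polling pattern; FakeTask and its fields are hypothetical stand-ins so the example runs without a vCenter, and this is not the oslo.vmware implementation itself.

    import time

    class TaskFailed(Exception):
        """Raised when a polled task finishes in an error state."""

    class FakeTask:
        """Hypothetical stand-in for a vSphere task object, so the
        example runs without a vCenter. Not part of oslo.vmware."""
        def __init__(self, polls_until_done=4):
            self._remaining = polls_until_done
            self.progress = 0
            self.state = "running"
            self.error = None

        def poll(self):
            # Each poll moves the fake task 25% closer to completion.
            self._remaining -= 1
            self.progress = min(100, self.progress + 25)
            if self._remaining <= 0:
                self.state = "success"
            return self.state

    def wait_for_task(task, poll_interval=0.5):
        """Poll until the task reaches a terminal state, echoing the
        'progress is N%' / 'completed successfully' records above."""
        start = time.monotonic()
        while True:
            state = task.poll()
            print(f"Task progress is {task.progress}%.")
            if state == "success":
                return {"duration_secs": round(time.monotonic() - start, 6)}
            if state == "error":
                raise TaskFailed(task.error)
            time.sleep(poll_interval)

    if __name__ == "__main__":
        print(wait_for_task(FakeTask()))

The log's task-158324 run is exactly this loop: one "progress is 0%" poll, then a completion record carrying duration_secs 0.068168.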
[ 2362.489533] env[61649]: DEBUG nova.compute.claims [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Aborting claim: {{(pid=61649) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 2362.489706] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2362.489932] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2362.491942] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 141079b293d94e1d8b45498c9d06ba56 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2362.493028] env[61649]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d15bfb27-d662-4d0b-ba2c-e10bc4080327 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.515518] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2362.525066] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 141079b293d94e1d8b45498c9d06ba56 [ 2362.561764] env[61649]: DEBUG oslo_vmware.rw_handles [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d2cf3ccd-d176-45f7-95d0-0ac8e7d970a3/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2362.622943] env[61649]: DEBUG oslo_vmware.rw_handles [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Completed reading data from the image iterator. 
{{(pid=61649) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2362.623128] env[61649]: DEBUG oslo_vmware.rw_handles [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d2cf3ccd-d176-45f7-95d0-0ac8e7d970a3/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2362.665732] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2718d9a-e393-4ea7-9766-395d678c4884 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.673952] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dcc5a81-adc1-46c6-888f-af8a6a2d9656 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.704359] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2d35673-bb9f-4e78-85aa-78acc0d2db33 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.711661] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54d5cfe6-29cf-45d4-a295-3b9edba33522 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.724383] env[61649]: DEBUG nova.compute.provider_tree [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2362.724869] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg f8b03f83482941f482c271f8d9b24687 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2362.732450] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f8b03f83482941f482c271f8d9b24687 [ 2362.733355] env[61649]: DEBUG nova.scheduler.client.report [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2362.735566] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting 
reply to msg 9d9044a193b946c09a3be45c89b8ef41 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2362.747143] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9d9044a193b946c09a3be45c89b8ef41
[ 2362.747143] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.257s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2362.747507] env[61649]: ERROR nova.compute.manager [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2362.747507] env[61649]: Faults: ['InvalidArgument']
[ 2362.747507] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Traceback (most recent call last):
[ 2362.747507] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 2362.747507] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] self.driver.spawn(context, instance, image_meta,
[ 2362.747507] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 2362.747507] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 2362.747507] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 2362.747507] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] self._fetch_image_if_missing(context, vi)
[ 2362.747507] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 2362.747507] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] image_cache(vi, tmp_image_ds_loc)
[ 2362.747507] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 2362.747853] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] vm_util.copy_virtual_disk(
[ 2362.747853] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 2362.747853] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] session._wait_for_task(vmdk_copy_task)
[ 2362.747853] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 2362.747853] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] return self.wait_for_task(task_ref)
[ 2362.747853] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 2362.747853] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] return evt.wait()
[ 2362.747853] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 2362.747853] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] result = hub.switch()
[ 2362.747853] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 2362.747853] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] return self.greenlet.switch()
[ 2362.747853] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 2362.747853] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] self.f(*self.args, **self.kw)
[ 2362.748299] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 2362.748299] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] raise exceptions.translate_fault(task_info.error)
[ 2362.748299] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2362.748299] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Faults: ['InvalidArgument']
[ 2362.748299] env[61649]: ERROR nova.compute.manager [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d]
[ 2362.748299] env[61649]: DEBUG nova.compute.utils [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] VimFaultException {{(pid=61649) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 2362.751075] env[61649]: DEBUG nova.compute.manager [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Build of instance f0e69971-df47-4ef0-85c9-ac686e4a4f9d was re-scheduled: A specified parameter was not correct: fileType
[ 2362.751075] env[61649]: Faults: ['InvalidArgument'] {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 2362.751472] env[61649]: DEBUG nova.compute.manager [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Unplugging VIFs for instance {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 2362.751647] env[61649]: DEBUG nova.compute.manager [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}}
[ 2362.751819] env[61649]: DEBUG nova.compute.manager [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 2362.751984] env[61649]: DEBUG nova.network.neutron [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}}
[ 2362.978831] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg a105b664ccf94a43acbe4cef37523efd in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2362.987170] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a105b664ccf94a43acbe4cef37523efd
[ 2362.987702] env[61649]: DEBUG nova.network.neutron [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2362.988262] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 0139081aeb474e33bae07364dd2e8c73 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2363.000432] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0139081aeb474e33bae07364dd2e8c73
[ 2363.001082] env[61649]: INFO nova.compute.manager [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Took 0.25 seconds to deallocate network for instance.
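The claim abort above runs entirely inside the "compute_resources" lock, and the lockutils records show the two numbers that matter for diagnosing contention: how long the caller waited to acquire the lock and how long it held it. A stdlib-only sketch of that wait/hold instrumentation pattern follows; the names and print format imitate the records in this log and this is not the oslo.concurrency code.

    import threading
    import time
    from contextlib import contextmanager

    # Registry of named locks, guarded so concurrent callers share one
    # Lock per name (mirroring the single "compute_resources" lock).
    _locks = {}
    _registry_guard = threading.Lock()

    @contextmanager
    def instrumented_lock(name, caller):
        """Acquire a named lock, reporting wait and hold times in the
        style of the oslo_concurrency.lockutils records above."""
        with _registry_guard:
            lock = _locks.setdefault(name, threading.Lock())
        t_wait = time.monotonic()
        lock.acquire()
        waited = time.monotonic() - t_wait
        print(f'Lock "{name}" acquired by "{caller}" :: waited {waited:.3f}s')
        t_hold = time.monotonic()
        try:
            yield
        finally:
            lock.release()
            held = time.monotonic() - t_hold
            print(f'Lock "{name}" "released" by "{caller}" :: held {held:.3f}s')

    if __name__ == "__main__":
        with instrumented_lock("compute_resources", "abort_instance_claim"):
            time.sleep(0.1)  # stand-in for resource-tracker bookkeeping

The per-instance locks later in this section report the same pair of timings at a much larger scale (waited 352.729s to terminate while the build held the lock for 547.896s), which is how the serialization between the failed build and the terminate request shows up in the log.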
[ 2363.002710] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 69948885d1a345f4b21401a2004cb4b7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2363.034294] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 69948885d1a345f4b21401a2004cb4b7 [ 2363.037223] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 555c419388674e3db1b5e370fb2bea68 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2363.069275] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 555c419388674e3db1b5e370fb2bea68 [ 2363.089375] env[61649]: INFO nova.scheduler.client.report [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Deleted allocations for instance f0e69971-df47-4ef0-85c9-ac686e4a4f9d [ 2363.095289] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg a6a3033442a248d8921b9ba5c44c9606 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2363.112497] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a6a3033442a248d8921b9ba5c44c9606 [ 2363.113292] env[61649]: DEBUG oslo_concurrency.lockutils [None req-5d8425da-455f-418d-8b4d-6d45c137204c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Lock "f0e69971-df47-4ef0-85c9-ac686e4a4f9d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 547.896s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2363.113292] env[61649]: DEBUG oslo_concurrency.lockutils [None req-b019443b-e4eb-4c5b-a467-281cc76e9a13 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Lock "f0e69971-df47-4ef0-85c9-ac686e4a4f9d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 352.729s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2363.113466] env[61649]: DEBUG oslo_concurrency.lockutils [None req-b019443b-e4eb-4c5b-a467-281cc76e9a13 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Acquiring lock "f0e69971-df47-4ef0-85c9-ac686e4a4f9d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2363.113625] env[61649]: DEBUG oslo_concurrency.lockutils [None req-b019443b-e4eb-4c5b-a467-281cc76e9a13 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Lock "f0e69971-df47-4ef0-85c9-ac686e4a4f9d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2363.113786] 
env[61649]: DEBUG oslo_concurrency.lockutils [None req-b019443b-e4eb-4c5b-a467-281cc76e9a13 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Lock "f0e69971-df47-4ef0-85c9-ac686e4a4f9d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2363.116027] env[61649]: INFO nova.compute.manager [None req-b019443b-e4eb-4c5b-a467-281cc76e9a13 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Terminating instance [ 2363.117800] env[61649]: DEBUG nova.compute.manager [None req-b019443b-e4eb-4c5b-a467-281cc76e9a13 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2363.118041] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-b019443b-e4eb-4c5b-a467-281cc76e9a13 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2363.118525] env[61649]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-22817240-5e70-4802-ab58-880eed01cc7e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2363.127943] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b00c76f3-c1f5-4c88-864e-9c4327e02206 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2363.154459] env[61649]: WARNING nova.virt.vmwareapi.vmops [None req-b019443b-e4eb-4c5b-a467-281cc76e9a13 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f0e69971-df47-4ef0-85c9-ac686e4a4f9d could not be found. [ 2363.154591] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-b019443b-e4eb-4c5b-a467-281cc76e9a13 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2363.154781] env[61649]: INFO nova.compute.manager [None req-b019443b-e4eb-4c5b-a467-281cc76e9a13 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2363.155038] env[61649]: DEBUG oslo.service.loopingcall [None req-b019443b-e4eb-4c5b-a467-281cc76e9a13 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2363.155534] env[61649]: DEBUG nova.compute.manager [-] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2363.155637] env[61649]: DEBUG nova.network.neutron [-] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2363.171483] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg d8bebb3f289e4fbe85607ff71fba559c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2363.176813] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d8bebb3f289e4fbe85607ff71fba559c [ 2363.177190] env[61649]: DEBUG nova.network.neutron [-] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2363.177581] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg dc0f8a79ea9341438447e2f493c97f3e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2363.205784] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dc0f8a79ea9341438447e2f493c97f3e [ 2363.206345] env[61649]: INFO nova.compute.manager [-] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] Took 0.05 seconds to deallocate network for instance. [ 2363.214002] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-b019443b-e4eb-4c5b-a467-281cc76e9a13 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 98dbbdba375b4738ae0532f7049e9441 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2363.238510] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 98dbbdba375b4738ae0532f7049e9441 [ 2363.252178] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-b019443b-e4eb-4c5b-a467-281cc76e9a13 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 396c57d4cc004aa1850a0a5be90af444 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2363.284424] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 396c57d4cc004aa1850a0a5be90af444 [ 2363.287119] env[61649]: DEBUG oslo_concurrency.lockutils [None req-b019443b-e4eb-4c5b-a467-281cc76e9a13 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Lock "f0e69971-df47-4ef0-85c9-ac686e4a4f9d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.174s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2363.287435] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-b019443b-e4eb-4c5b-a467-281cc76e9a13 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg b8bbbdee0ab7478bb1b51cfc0479a6f0 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2363.288695] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "f0e69971-df47-4ef0-85c9-ac686e4a4f9d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 
21.275s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2363.288895] env[61649]: INFO nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: f0e69971-df47-4ef0-85c9-ac686e4a4f9d] During sync_power_state the instance has a pending task (deleting). Skip. [ 2363.289071] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "f0e69971-df47-4ef0-85c9-ac686e4a4f9d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2363.296065] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b8bbbdee0ab7478bb1b51cfc0479a6f0 [ 2363.931139] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2363.931139] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Cleaning up deleted instances {{(pid=61649) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11217}} [ 2363.931139] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg d9fdb7e9788746e98458f757ba681ed0 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2363.938669] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d9fdb7e9788746e98458f757ba681ed0 [ 2363.939174] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] There are 0 instances to clean {{(pid=61649) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11226}} [ 2402.589249] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9bc47fe7-6583-4b5b-a100-f6926d45039c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 7a832ef996464ac19660415d3ef7b1e9 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2402.597038] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7a832ef996464ac19660415d3ef7b1e9 [ 2402.597489] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9bc47fe7-6583-4b5b-a100-f6926d45039c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Acquiring lock "09dcd3bd-1baa-4276-b8c5-64de3de036f2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2403.939373] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2403.939676] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2403.939913] env[61649]: DEBUG nova.compute.manager [None 
req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61649) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}}
[ 2406.929739] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2410.497906] env[61649]: WARNING oslo_vmware.rw_handles [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 2410.497906] env[61649]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 2410.497906] env[61649]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 2410.497906] env[61649]: ERROR oslo_vmware.rw_handles self._conn.getresponse()
[ 2410.497906] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 2410.497906] env[61649]: ERROR oslo_vmware.rw_handles response.begin()
[ 2410.497906] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 2410.497906] env[61649]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status()
[ 2410.497906] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 2410.497906] env[61649]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without"
[ 2410.497906] env[61649]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 2410.497906] env[61649]: ERROR oslo_vmware.rw_handles
[ 2410.498855] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Downloaded image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to vmware_temp/d2cf3ccd-d176-45f7-95d0-0ac8e7d970a3/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 2410.500715] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Caching image {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 2410.501021] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Copying Virtual Disk [datastore1] vmware_temp/d2cf3ccd-d176-45f7-95d0-0ac8e7d970a3/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk to [datastore1] vmware_temp/d2cf3ccd-d176-45f7-95d0-0ac8e7d970a3/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk {{(pid=61649) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 2410.501336] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6f81e4aa-7474-48a6-8ee2-50230dfd86a7 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2410.509544] env[61649]: DEBUG oslo_vmware.api [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Waiting for the task: (returnval){
[ 2410.509544] env[61649]: value = "task-158325"
[ 2410.509544] env[61649]: _type = "Task"
[ 2410.509544] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2410.517601] env[61649]: DEBUG oslo_vmware.api [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Task: {'id': task-158325, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2411.019451] env[61649]: DEBUG oslo_vmware.exceptions [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Fault InvalidArgument not matched. {{(pid=61649) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 2411.020062] env[61649]: DEBUG oslo_concurrency.lockutils [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2411.020758] env[61649]: ERROR nova.compute.manager [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2411.020758] env[61649]: Faults: ['InvalidArgument']
[ 2411.020758] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Traceback (most recent call last):
[ 2411.020758] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources
[ 2411.020758] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] yield resources
[ 2411.020758] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 2411.020758] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] self.driver.spawn(context, instance, image_meta,
[ 2411.020758] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 2411.020758] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 2411.020758] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 2411.020758] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] self._fetch_image_if_missing(context, vi)
[ 2411.020758] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 2411.021116] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] image_cache(vi, tmp_image_ds_loc)
[ 2411.021116] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 2411.021116] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] vm_util.copy_virtual_disk(
[ 2411.021116] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 2411.021116] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] session._wait_for_task(vmdk_copy_task)
[ 2411.021116] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 2411.021116] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] return self.wait_for_task(task_ref)
[ 2411.021116] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 2411.021116] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] return evt.wait()
[ 2411.021116] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 2411.021116] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] result = hub.switch()
[ 2411.021116] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 2411.021116] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] return self.greenlet.switch()
[ 2411.021438] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 2411.021438] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] self.f(*self.args, **self.kw)
[ 2411.021438] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 2411.021438] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] raise exceptions.translate_fault(task_info.error)
[ 2411.021438] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2411.021438] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Faults: ['InvalidArgument']
[ 2411.021438] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434]
[ 2411.022005] env[61649]: INFO nova.compute.manager [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Terminating instance
[ 2411.023782] env[61649]: DEBUG oslo_concurrency.lockutils [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2411.024118] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2411.024823] env[61649]: DEBUG nova.compute.manager [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 2411.025137] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 2411.025469] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-14bafa64-c50a-44de-836b-ea43df2daccb {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2411.027692] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04ccdd9c-5043-4707-8fdd-2747fc4f2e1b {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2411.034982] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Unregistering the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 2411.036121] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b6eb5f7f-39d7-4bc8-997f-ed1872c04ab8 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2411.037551] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2411.037855] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-83e64131-f514-4104-be92-16d6c9c5a97d
tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61649) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2411.038684] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5299d0fa-df2d-42cc-8c04-7da29950e11b {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2411.043544] env[61649]: DEBUG oslo_vmware.api [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Waiting for the task: (returnval){ [ 2411.043544] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]522d68d8-d4ac-214c-2b67-30cc62286f66" [ 2411.043544] env[61649]: _type = "Task" [ 2411.043544] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2411.050660] env[61649]: DEBUG oslo_vmware.api [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]522d68d8-d4ac-214c-2b67-30cc62286f66, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2411.103174] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Unregistered the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2411.103630] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Deleting contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2411.103955] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Deleting the datastore file [datastore1] 4d429147-d3fe-4d99-af2a-e28a3829f434 {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2411.104328] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-878f10e8-aec1-4818-8059-0505d117f9bc {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2411.110431] env[61649]: DEBUG oslo_vmware.api [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Waiting for the task: (returnval){ [ 2411.110431] env[61649]: value = "task-158327" [ 2411.110431] env[61649]: _type = "Task" [ 2411.110431] env[61649]: } to complete. 
{{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2411.117648] env[61649]: DEBUG oslo_vmware.api [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Task: {'id': task-158327, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2411.553726] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Preparing fetch location {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2411.553966] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Creating directory with path [datastore1] vmware_temp/88835c2d-3e32-4af7-abca-6de0fbe172eb/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2411.554185] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f476f03c-1238-4401-9945-d4196643a732 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2411.566183] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Created directory with path [datastore1] vmware_temp/88835c2d-3e32-4af7-abca-6de0fbe172eb/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2411.566183] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Fetch image to [datastore1] vmware_temp/88835c2d-3e32-4af7-abca-6de0fbe172eb/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2411.566183] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to [datastore1] vmware_temp/88835c2d-3e32-4af7-abca-6de0fbe172eb/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2411.566721] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72b43b57-1ef4-4e95-8941-e0fffd4d41bd {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2411.573373] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e2bd9a1-46f1-4557-93ef-3b03969d4d94 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2411.582172] env[61649]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-562d4e7e-51e7-48d7-a254-1cc4c9ea87fa {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2411.616703] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd75b65e-017d-426f-94a5-bcfa3486b124 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2411.623606] env[61649]: DEBUG oslo_vmware.api [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Task: {'id': task-158327, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077538} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2411.624946] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Deleted the datastore file {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2411.625140] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Deleted contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2411.625314] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2411.625496] env[61649]: INFO nova.compute.manager [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Took 0.60 seconds to destroy the instance on the hypervisor. 
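Both failed builds in this section go through the same claim/abort accounting against resource provider dad32f24-3843-462d-a3f9-4ef2a60037c4, whose inventory is logged repeatedly with total, reserved, allocation_ratio, and max_unit per resource class (and is refreshed again in the records just below). Placement derives the schedulable capacity of each class as (total - reserved) * allocation_ratio, capped per instance by max_unit; the arithmetic below checks the logged numbers and is a sketch of that formula, not Placement's code.

    # Inventory exactly as logged for provider
    # dad32f24-3843-462d-a3f9-4ef2a60037c4.
    INVENTORY = {
        'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                 'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1,
                      'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1,
                    'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0},
    }

    def capacity(inventory):
        """Effective schedulable capacity per resource class:
        (total - reserved) * allocation_ratio."""
        return {rc: (inv['total'] - inv['reserved']) * inv['allocation_ratio']
                for rc, inv in inventory.items()}

    if __name__ == "__main__":
        for rc, cap in capacity(INVENTORY).items():
            print(f"{rc}: {cap:g} (per-instance max_unit "
                  f"{INVENTORY[rc]['max_unit']})")
        # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400

This is why the repeated "Inventory has not changed" records are cheap no-ops: as long as these six fields are unchanged, the provider's capacity in Placement is identical and no inventory update is sent.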
[ 2411.627468] env[61649]: DEBUG nova.compute.claims [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Aborting claim: {{(pid=61649) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 2411.627644] env[61649]: DEBUG oslo_concurrency.lockutils [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2411.627983] env[61649]: DEBUG oslo_concurrency.lockutils [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2411.629831] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Expecting reply to msg 82f171409f974888a7088a8546afe791 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2411.630655] env[61649]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-680e4a36-f99a-42c8-9620-4401c2ecacac {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2411.651767] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2411.666240] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 82f171409f974888a7088a8546afe791 [ 2411.703649] env[61649]: DEBUG nova.scheduler.client.report [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Refreshing inventories for resource provider dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 2411.717975] env[61649]: DEBUG nova.scheduler.client.report [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Updating ProviderTree inventory for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 
[ 2411.718228] env[61649]: DEBUG nova.compute.provider_tree [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Updating inventory in ProviderTree for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}}
[ 2411.725637] env[61649]: DEBUG oslo_vmware.rw_handles [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/88835c2d-3e32-4af7-abca-6de0fbe172eb/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 2411.729149] env[61649]: DEBUG nova.scheduler.client.report [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Refreshing aggregate associations for resource provider dad32f24-3843-462d-a3f9-4ef2a60037c4, aggregates: None {{(pid=61649) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}}
[ 2411.787462] env[61649]: DEBUG oslo_vmware.rw_handles [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Completed reading data from the image iterator. {{(pid=61649) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 2411.787660] env[61649]: DEBUG oslo_vmware.rw_handles [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/88835c2d-3e32-4af7-abca-6de0fbe172eb/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
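The rw_handles entries above are the 21,318,656-byte sparse VMDK being streamed to the datastore's /folder endpoint. A rough sketch of what such a write handle boils down to, assuming plain http.client and leaving out the session cookie/ticket handling that oslo.vmware layers on top (illustrative, not the actual FileWriteHandle implementation):

    # Sketch only: a chunked HTTPS PUT with the size announced up front,
    # which is essentially what "Creating HTTP connection to write to
    # file with size = 21318656" amounts to. Real handles also attach
    # vCenter session cookies or a generic service ticket.
    import http.client

    def put_image(host, path, src, size, chunk=64 * 1024):
        conn = http.client.HTTPSConnection(host, 443)
        conn.putrequest('PUT', path)
        conn.putheader('Content-Type', 'application/octet-stream')
        conn.putheader('Content-Length', str(size))
        conn.endheaders()
        sent = 0
        while sent < size:
            data = src.read(min(chunk, size - sent))
            if not data:
                break
            conn.send(data)
            sent += len(data)
        resp = conn.getresponse()   # host replies once the upload completes
        resp.read()
        conn.close()
        return sent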
[ 2411.797734] env[61649]: DEBUG nova.scheduler.client.report [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Refreshing trait associations for resource provider dad32f24-3843-462d-a3f9-4ef2a60037c4, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=61649) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}}
[ 2411.871902] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ddf757b-7686-4300-a0ab-8789fc791800 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2411.879147] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ee10746-3eb2-4477-b9d6-72922dd36455 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2411.907660] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb34f9e1-1d98-4768-a47a-d1477ddd5fe3 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2411.914224] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9a9dd9f-e982-44f4-81f7-e5da44d92db7 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2411.926302] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2411.926727] env[61649]: DEBUG nova.compute.provider_tree [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2411.927189] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Expecting reply to msg 4defb2fcadeb491c96ffb754657a839d in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2411.934599] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4defb2fcadeb491c96ffb754657a839d
[ 2411.935548] env[61649]: DEBUG nova.scheduler.client.report [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 2411.937738] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Expecting reply to msg 3a135c903a6143bea3fdbc6030d63c5f in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2411.947867] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3a135c903a6143bea3fdbc6030d63c5f
[ 2411.948555] env[61649]: DEBUG oslo_concurrency.lockutils [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.321s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2411.949064] env[61649]: ERROR nova.compute.manager [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2411.949064] env[61649]: Faults: ['InvalidArgument']
[ 2411.949064] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Traceback (most recent call last):
[ 2411.949064] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434]   File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 2411.949064] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434]     self.driver.spawn(context, instance, image_meta,
[ 2411.949064] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 2411.949064] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 2411.949064] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 2411.949064] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434]     self._fetch_image_if_missing(context, vi)
[ 2411.949064] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 2411.949064] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434]     image_cache(vi, tmp_image_ds_loc)
[ 2411.949064] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 2411.949390] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434]     vm_util.copy_virtual_disk(
[ 2411.949390] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434]   File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 2411.949390] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434]     session._wait_for_task(vmdk_copy_task)
[ 2411.949390] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434]   File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 2411.949390] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434]     return self.wait_for_task(task_ref)
[ 2411.949390] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 2411.949390] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434]     return evt.wait()
[ 2411.949390] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 2411.949390] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434]     result = hub.switch()
[ 2411.949390] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 2411.949390] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434]     return self.greenlet.switch()
[ 2411.949390] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 2411.949390] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434]     self.f(*self.args, **self.kw)
[ 2411.949691] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 2411.949691] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434]     raise exceptions.translate_fault(task_info.error)
[ 2411.949691] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2411.949691] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Faults: ['InvalidArgument']
[ 2411.949691] env[61649]: ERROR nova.compute.manager [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434]
[ 2411.949848] env[61649]: DEBUG nova.compute.utils [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] VimFaultException {{(pid=61649) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 2411.951170] env[61649]: DEBUG nova.compute.manager [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Build of instance 4d429147-d3fe-4d99-af2a-e28a3829f434 was re-scheduled: A specified parameter was not correct: fileType
[ 2411.951170] env[61649]: Faults: ['InvalidArgument'] {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 2411.951547] env[61649]: DEBUG nova.compute.manager [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Unplugging VIFs for instance {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 2411.951720] env[61649]: DEBUG nova.compute.manager [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}}
[ 2411.951894] env[61649]: DEBUG nova.compute.manager [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 2411.952075] env[61649]: DEBUG nova.network.neutron [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}}
[ 2412.171485] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Expecting reply to msg 361390e8fe7a4da3b7de560423d547c4 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2412.181344] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 361390e8fe7a4da3b7de560423d547c4
[ 2412.183462] env[61649]: DEBUG nova.network.neutron [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2412.183462] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Expecting reply to msg 0424fb8360aa40de8b1daaf636e9e1c0 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2412.195631] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0424fb8360aa40de8b1daaf636e9e1c0
[ 2412.196276] env[61649]: INFO nova.compute.manager [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Took 0.24 seconds to deallocate network for instance.
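The traceback above ends in oslo_vmware's _poll_task, where a failed vCenter task is translated into a Python exception; that VimFaultException is what bubbles up through _build_and_run_instance and triggers the reschedule and network cleanup just logged. A minimal sketch of that poll-and-translate loop, with stand-in field names (state, error_message, faults) rather than oslo.vmware's actual TaskInfo object:

    # Illustrative sketch, not oslo.vmware's implementation: poll the
    # task state, return the result on 'success', and turn a VI fault
    # such as InvalidArgument ("A specified parameter was not correct:
    # fileType") into an exception the compute manager can catch.
    import time

    class VimFault(Exception):
        def __init__(self, msg, fault_list):
            super().__init__(msg)
            self.fault_list = fault_list        # e.g. ['InvalidArgument']

    def wait_for_task(get_task_info, interval=0.5):
        while True:
            info = get_task_info()              # one PropertyCollector read per poll
            if info.state == 'success':
                return info.result
            if info.state == 'error':
                raise VimFault(info.error_message, info.faults)
            time.sleep(interval)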
[ 2412.198101] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Expecting reply to msg 1b31ef791cd145929566490c9d803c5b in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2412.233009] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1b31ef791cd145929566490c9d803c5b
[ 2412.237356] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Expecting reply to msg b60ed144a88b4dfd825fcfb184300b3e in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2412.266355] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b60ed144a88b4dfd825fcfb184300b3e
[ 2412.285422] env[61649]: INFO nova.scheduler.client.report [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Deleted allocations for instance 4d429147-d3fe-4d99-af2a-e28a3829f434
[ 2412.291443] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Expecting reply to msg af586a7bf81b49a490a76578ab758639 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2412.300475] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg af586a7bf81b49a490a76578ab758639
[ 2412.301065] env[61649]: DEBUG oslo_concurrency.lockutils [None req-91285984-2463-4439-9eda-ce427b9ac096 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Lock "4d429147-d3fe-4d99-af2a-e28a3829f434" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 507.211s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2412.301403] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c1d15540-ceaf-488e-bb2c-736018af882c tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Lock "4d429147-d3fe-4d99-af2a-e28a3829f434" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 311.386s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2412.301638] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c1d15540-ceaf-488e-bb2c-736018af882c tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Acquiring lock "4d429147-d3fe-4d99-af2a-e28a3829f434-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2412.301841] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c1d15540-ceaf-488e-bb2c-736018af882c tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Lock "4d429147-d3fe-4d99-af2a-e28a3829f434-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2412.302003] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c1d15540-ceaf-488e-bb2c-736018af882c tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Lock "4d429147-d3fe-4d99-af2a-e28a3829f434-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2412.303892] env[61649]: INFO nova.compute.manager [None req-c1d15540-ceaf-488e-bb2c-736018af882c tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Terminating instance
[ 2412.305656] env[61649]: DEBUG nova.compute.manager [None req-c1d15540-ceaf-488e-bb2c-736018af882c tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 2412.305840] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-c1d15540-ceaf-488e-bb2c-736018af882c tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 2412.306303] env[61649]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1c28ec69-0a9f-4d37-99f9-6d323b9eca8e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2412.315430] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a8d35fd-109f-4303-8b92-35f248e753a7 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2412.341808] env[61649]: WARNING nova.virt.vmwareapi.vmops [None req-c1d15540-ceaf-488e-bb2c-736018af882c tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4d429147-d3fe-4d99-af2a-e28a3829f434 could not be found.
[ 2412.342032] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-c1d15540-ceaf-488e-bb2c-736018af882c tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 2412.342210] env[61649]: INFO nova.compute.manager [None req-c1d15540-ceaf-488e-bb2c-736018af882c tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Took 0.04 seconds to destroy the instance on the hypervisor.
[ 2412.342462] env[61649]: DEBUG oslo.service.loopingcall [None req-c1d15540-ceaf-488e-bb2c-736018af882c tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 2412.342686] env[61649]: DEBUG nova.compute.manager [-] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 2412.342782] env[61649]: DEBUG nova.network.neutron [-] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}}
[ 2412.361419] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 04f64fd495fc40bab36b14e0afe8e8e4 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2412.367521] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 04f64fd495fc40bab36b14e0afe8e8e4
[ 2412.367891] env[61649]: DEBUG nova.network.neutron [-] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2412.368299] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg e9970bd7937f49398012141a1b1f082c in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2412.375659] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e9970bd7937f49398012141a1b1f082c
[ 2412.376184] env[61649]: INFO nova.compute.manager [-] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] Took 0.03 seconds to deallocate network for instance.
[ 2412.379571] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c1d15540-ceaf-488e-bb2c-736018af882c tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Expecting reply to msg 4947247a7e114f6184bb4d05b7bc42b4 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2412.405658] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4947247a7e114f6184bb4d05b7bc42b4
[ 2412.420163] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c1d15540-ceaf-488e-bb2c-736018af882c tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Expecting reply to msg c0b0e5dabc52417ca1c2647225795106 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2412.453517] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c0b0e5dabc52417ca1c2647225795106
[ 2412.456116] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c1d15540-ceaf-488e-bb2c-736018af882c tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Lock "4d429147-d3fe-4d99-af2a-e28a3829f434" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.155s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2412.456453] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c1d15540-ceaf-488e-bb2c-736018af882c tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Expecting reply to msg 9fd033de91434ae0a51c2dddc632dec3 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2412.457174] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "4d429147-d3fe-4d99-af2a-e28a3829f434" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 70.443s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2412.457358] env[61649]: INFO nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 4d429147-d3fe-4d99-af2a-e28a3829f434] During sync_power_state the instance has a pending task (deleting). Skip.
[ 2412.457522] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "4d429147-d3fe-4d99-af2a-e28a3829f434" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2412.468483] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9fd033de91434ae0a51c2dddc632dec3
[ 2412.929090] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2413.930554] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2413.930554] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Starting heal instance info cache {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}}
[ 2413.930554] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Rebuilding the list of instances to heal {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}}
[ 2413.930554] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 4442e44abb7a4dc797008c7a7406931c in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2413.942750] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4442e44abb7a4dc797008c7a7406931c
[ 2413.944147] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}}
[ 2413.944523] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}}
[ 2413.944707] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}}
[ 2413.944841] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}}
[ 2413.944970] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}}
[ 2413.945093] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Didn't find any instances for network info cache update. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}}
[ 2413.945583] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2413.945761] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2413.946070] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg eb8e52793651423790914fc35cfba143 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2413.954429] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eb8e52793651423790914fc35cfba143
[ 2413.955286] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2413.955482] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2413.955641] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2413.955787] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61649) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 2413.956815] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47175ad6-aa74-480f-920c-6f683279aea1 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2413.965303] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d36187c-f5c3-49df-895a-eab26b31fb62 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2413.979778] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb7ccfd1-e6b0-42db-b2b8-457d7d9a5b11 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2413.985698] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9a55968-868b-4780-8959-49f1e5068894 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2414.013210] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181839MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61649) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 2414.013345] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2414.013527] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2414.014291] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg a4ecd254825741afac92d774db93176f in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2414.034594] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a4ecd254825741afac92d774db93176f
[ 2414.036994] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 0b0f1d028c0b4a1683935c8330bb7027 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2414.044944] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0b0f1d028c0b4a1683935c8330bb7027
[ 2414.060041] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 8295f484-2065-4a21-bdec-7d38e98f93e7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2414.060201] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance da4cbfc3-cf43-4cf6-b391-d7183699e58d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2414.060338] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 56652181-0379-4532-9b2a-e6138cbd73ad actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2414.060462] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 09dcd3bd-1baa-4276-b8c5-64de3de036f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2414.060580] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance ed66fa83-b203-4c7a-b1e5-d00547fa46c9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2414.060752] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 2414.060886] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1152MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 2414.126289] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ad5c11b-9ca2-4809-b23e-7dac14c31c6b {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2414.133581] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ab836db-0ec5-423a-9176-51b119e8a5c4 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2414.163061] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b15a3ff9-1cb4-4616-b6bd-8d120863a5cb {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2414.169544] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f206afc-550f-4b00-9135-1a66e608619d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2414.182121] env[61649]: DEBUG nova.compute.provider_tree [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2414.182550] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 08249a63c56d41c38a42d8fbc6a134c7 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2414.189943] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 08249a63c56d41c38a42d8fbc6a134c7
[ 2414.190749] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 2414.192942] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg d06fbd19e5bc4b7c8356c1b319ec81f2 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2414.204869] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d06fbd19e5bc4b7c8356c1b319ec81f2
[ 2414.205467] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61649) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 2414.205642] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.192s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2415.189116] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2450.994031] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg ee69b502cd6c4eeaa04415af074f3936 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2451.004264] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ee69b502cd6c4eeaa04415af074f3936
[ 2451.252125] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquiring lock "574eac86-ac01-40f0-9e89-aa1fff108353" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2451.252381] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Lock "574eac86-ac01-40f0-9e89-aa1fff108353" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
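The "Final resource view" in the audit above is plain bookkeeping over the five tracked instances, each holding 1 VCPU / 128 MB / 1 GB, plus the 512 MB host memory reservation. A quick check that the reported figures add up:

    # Each of the 5 instances claims {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1};
    # used_ram additionally includes the 512 MB 'reserved' from the inventory.
    instances = 5
    used_vcpus = instances * 1            # -> 5   (total_vcpus=48)
    used_ram   = 512 + instances * 128    # -> 1152 MB (phys_ram=196590MB)
    used_disk  = instances * 1            # -> 5 GB (phys_disk=200GB)
    assert (used_vcpus, used_ram, used_disk) == (5, 1152, 5)

(The free_ram=181839MB in the hypervisor view is what vCenter reports as actually free on the host, which is independent of this claim arithmetic.)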
[ 2451.252837] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 04c501362fc54431aea8e7aa79e5844e in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2451.261195] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 04c501362fc54431aea8e7aa79e5844e
[ 2451.261608] env[61649]: DEBUG nova.compute.manager [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 2451.263272] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 03a636d0332546bf9401a360e8729b22 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2451.291574] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 03a636d0332546bf9401a360e8729b22
[ 2451.306241] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2451.306465] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2451.308143] env[61649]: INFO nova.compute.claims [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 2451.309753] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 460cb2bba5de4e5981b677513aca0647 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2451.339521] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 460cb2bba5de4e5981b677513aca0647
[ 2451.341289] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg e18675cc82084f4a9ebaf36d9c1951d8 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2451.348258] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e18675cc82084f4a9ebaf36d9c1951d8
[ 2451.428998] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9b6b62c-f032-42b5-bf04-2373c7f31e9e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2451.436542] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eadfeda-56a0-4222-a8b2-bbd49e272e97 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2451.466428] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db6c9bf9-715c-4bfa-8fd7-fe199d7151bc {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2451.472939] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7f02edc-50ab-4eaa-8b28-fa526cc6ccbe {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2451.485454] env[61649]: DEBUG nova.compute.provider_tree [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2451.485915] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 1de278fdaaf7484687f14302235a1689 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2451.493398] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1de278fdaaf7484687f14302235a1689
[ 2451.494243] env[61649]: DEBUG nova.scheduler.client.report [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 2451.496427] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 307b224add954b57bcce7af95d1fbf70 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2451.506367] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 307b224add954b57bcce7af95d1fbf70
[ 2451.507015] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.201s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2451.507473] env[61649]: DEBUG nova.compute.manager [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Start building networks asynchronously for instance. {{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}}
[ 2451.509169] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg b60aec311f2f4b06801a625afaa22b7c in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2451.539353] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b60aec311f2f4b06801a625afaa22b7c
[ 2451.540628] env[61649]: DEBUG nova.compute.utils [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Using /dev/sd instead of None {{(pid=61649) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 2451.541227] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 5520c0cbf7324d66b1911025e47eafcd in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2451.541968] env[61649]: DEBUG nova.compute.manager [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Allocating IP information in the background. {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}}
[ 2451.542153] env[61649]: DEBUG nova.network.neutron [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] allocate_for_instance() {{(pid=61649) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 2451.549514] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5520c0cbf7324d66b1911025e47eafcd
[ 2451.550032] env[61649]: DEBUG nova.compute.manager [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Start building block device mappings for instance. {{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}}
[ 2451.551820] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 78d34ddd4ba34204b7e567cd061cb9b4 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2451.579468] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 78d34ddd4ba34204b7e567cd061cb9b4
[ 2451.582323] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 42ad26ee02e04f66b0e04f1ab246c696 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2451.586820] env[61649]: DEBUG nova.policy [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fc5f71ebe35b4863a38dd7606ae87937', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '72501ae7a7dd4f85801c096912a5af36', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61649) authorize /opt/stack/nova/nova/policy.py:203}}
[ 2451.609778] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 42ad26ee02e04f66b0e04f1ab246c696
[ 2451.610925] env[61649]: DEBUG nova.compute.manager [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Start spawning the instance on the hypervisor. {{(pid=61649) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}}
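The failed policy check above is expected for a plain project member: the credentials carry is_admin=False, so an admin-only rule denies network:attach_external_network and the build simply proceeds without external-network attachment. A toy evaluation of that decision (the rule is assumed to be the usual "is_admin:True" default; the real check goes through oslo.policy with the configured policy files):

    # Toy stand-in for the oslo.policy rule evaluation; field names
    # come from the credentials dict logged above.
    creds = {'is_admin': False, 'roles': ['reader', 'member']}

    def check_attach_external_network(creds):
        # assumed rule: "is_admin:True"
        return creds.get('is_admin', False)

    assert check_attach_external_network(creds) is False  # -> check failed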
[ 2451.631530] env[61649]: DEBUG nova.virt.hardware [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-04-02T10:03:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-04-02T10:03:42Z,direct_url=,disk_format='vmdk',id=d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d9c1bd4c77004c3cb8e42232cad1896c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-04-02T10:03:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 2451.631763] env[61649]: DEBUG nova.virt.hardware [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Flavor limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 2451.631924] env[61649]: DEBUG nova.virt.hardware [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Image limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 2451.632126] env[61649]: DEBUG nova.virt.hardware [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Flavor pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 2451.632278] env[61649]: DEBUG nova.virt.hardware [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Image pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 2451.632428] env[61649]: DEBUG nova.virt.hardware [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 2451.632632] env[61649]: DEBUG nova.virt.hardware [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 2451.632793] env[61649]: DEBUG nova.virt.hardware [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
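The enumeration whose result the next lines report is straightforward: list the (sockets, cores, threads) combinations whose product equals the flavor's vcpus, bounded by the 65536 limits just logged. A sketch of that search (illustrative helper, not nova.virt.hardware's actual code) for the 1-vCPU m1.nano flavor:

    # For vcpus=1 only (1, 1, 1) survives, matching "Got 1 possible
    # topologies" and "Possible topologies [VirtCPUTopology(cores=1,
    # sockets=1,threads=1)]" below.
    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        return [(s, c, t)
                for s in range(1, min(vcpus, max_sockets) + 1)
                for c in range(1, min(vcpus, max_cores) + 1)
                for t in range(1, min(vcpus, max_threads) + 1)
                if s * c * t == vcpus]

    print(possible_topologies(1))   # [(1, 1, 1)]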
tempest-ServersTestJSON-1529792186-project-member] Got 1 possible topologies {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2451.633187] env[61649]: DEBUG nova.virt.hardware [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2451.633391] env[61649]: DEBUG nova.virt.hardware [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2451.634221] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a032787-10c4-4051-8bd7-6e457afc2a06 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2451.642044] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a489047-2bdf-4dde-9977-cfa4b4d9bc05 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2451.914698] env[61649]: DEBUG nova.network.neutron [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Successfully created port: 93510b2a-9566-44fe-abfd-c6442a5d8e51 {{(pid=61649) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2452.311468] env[61649]: DEBUG nova.compute.manager [req-99406b07-8708-423f-9f88-9433e7eaaf10 req-ce521a4f-cd7f-4121-97a5-cc3cecfa02cf service nova] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Received event network-vif-plugged-93510b2a-9566-44fe-abfd-c6442a5d8e51 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 2452.311742] env[61649]: DEBUG oslo_concurrency.lockutils [req-99406b07-8708-423f-9f88-9433e7eaaf10 req-ce521a4f-cd7f-4121-97a5-cc3cecfa02cf service nova] Acquiring lock "574eac86-ac01-40f0-9e89-aa1fff108353-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2452.311846] env[61649]: DEBUG oslo_concurrency.lockutils [req-99406b07-8708-423f-9f88-9433e7eaaf10 req-ce521a4f-cd7f-4121-97a5-cc3cecfa02cf service nova] Lock "574eac86-ac01-40f0-9e89-aa1fff108353-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2452.312030] env[61649]: DEBUG oslo_concurrency.lockutils [req-99406b07-8708-423f-9f88-9433e7eaaf10 req-ce521a4f-cd7f-4121-97a5-cc3cecfa02cf service nova] Lock "574eac86-ac01-40f0-9e89-aa1fff108353-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2452.312229] env[61649]: DEBUG nova.compute.manager [req-99406b07-8708-423f-9f88-9433e7eaaf10 req-ce521a4f-cd7f-4121-97a5-cc3cecfa02cf service nova] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] 
No waiting events found dispatching network-vif-plugged-93510b2a-9566-44fe-abfd-c6442a5d8e51 {{(pid=61649) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2452.312400] env[61649]: WARNING nova.compute.manager [req-99406b07-8708-423f-9f88-9433e7eaaf10 req-ce521a4f-cd7f-4121-97a5-cc3cecfa02cf service nova] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Received unexpected event network-vif-plugged-93510b2a-9566-44fe-abfd-c6442a5d8e51 for instance with vm_state building and task_state spawning. [ 2452.383176] env[61649]: DEBUG nova.network.neutron [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Successfully updated port: 93510b2a-9566-44fe-abfd-c6442a5d8e51 {{(pid=61649) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2452.383791] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 0376ad3682ea4704a297328fef5b3a5e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2452.394465] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0376ad3682ea4704a297328fef5b3a5e [ 2452.395114] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquiring lock "refresh_cache-574eac86-ac01-40f0-9e89-aa1fff108353" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2452.395264] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquired lock "refresh_cache-574eac86-ac01-40f0-9e89-aa1fff108353" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2452.395415] env[61649]: DEBUG nova.network.neutron [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Building network info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 2452.395789] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg d2df6b19e5ce41f190162a6632511f39 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2452.402100] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d2df6b19e5ce41f190162a6632511f39 [ 2452.430961] env[61649]: DEBUG nova.network.neutron [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Instance cache missing network info. 
{{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 2452.634512] env[61649]: DEBUG nova.network.neutron [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Updating instance_info_cache with network_info: [{"id": "93510b2a-9566-44fe-abfd-c6442a5d8e51", "address": "fa:16:3e:a8:33:f5", "network": {"id": "8c8e2f95-e820-404a-a1c5-63f49f27fa23", "bridge": "br-int", "label": "tempest-ServersTestJSON-1518117450-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72501ae7a7dd4f85801c096912a5af36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "158692b5-b9fb-49e8-9903-e742ffd6c168", "external-id": "nsx-vlan-transportzone-769", "segmentation_id": 769, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93510b2a-95", "ovs_interfaceid": "93510b2a-9566-44fe-abfd-c6442a5d8e51", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2452.635024] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 0cb38215dc2f49caa0f958cce5a442ee in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2452.644237] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0cb38215dc2f49caa0f958cce5a442ee [ 2452.644768] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Releasing lock "refresh_cache-574eac86-ac01-40f0-9e89-aa1fff108353" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2452.645027] env[61649]: DEBUG nova.compute.manager [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Instance network_info: |[{"id": "93510b2a-9566-44fe-abfd-c6442a5d8e51", "address": "fa:16:3e:a8:33:f5", "network": {"id": "8c8e2f95-e820-404a-a1c5-63f49f27fa23", "bridge": "br-int", "label": "tempest-ServersTestJSON-1518117450-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72501ae7a7dd4f85801c096912a5af36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "158692b5-b9fb-49e8-9903-e742ffd6c168", "external-id": 
"nsx-vlan-transportzone-769", "segmentation_id": 769, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93510b2a-95", "ovs_interfaceid": "93510b2a-9566-44fe-abfd-c6442a5d8e51", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2452.645395] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a8:33:f5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '158692b5-b9fb-49e8-9903-e742ffd6c168', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '93510b2a-9566-44fe-abfd-c6442a5d8e51', 'vif_model': 'vmxnet3'}] {{(pid=61649) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2452.652947] env[61649]: DEBUG oslo.service.loopingcall [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2452.653366] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Creating VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2452.653579] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bc3db5ce-6ef8-4751-9066-34b44a9837f9 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2452.675195] env[61649]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2452.675195] env[61649]: value = "task-158328" [ 2452.675195] env[61649]: _type = "Task" [ 2452.675195] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2452.682598] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158328, 'name': CreateVM_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2453.185652] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158328, 'name': CreateVM_Task, 'duration_secs': 0.287113} completed successfully. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2453.185802] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Created VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2453.186429] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2453.186595] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2453.186914] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2453.187148] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-834f633d-bc03-4d14-b512-921648d9e871 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2453.191244] env[61649]: DEBUG oslo_vmware.api [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Waiting for the task: (returnval){ [ 2453.191244] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52695980-11fe-1648-9f8b-b59ed9830662" [ 2453.191244] env[61649]: _type = "Task" [ 2453.191244] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2453.198142] env[61649]: DEBUG oslo_vmware.api [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52695980-11fe-1648-9f8b-b59ed9830662, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2453.702160] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2453.702445] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Processing image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2453.702611] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2454.341485] env[61649]: DEBUG nova.compute.manager [req-7a86d99f-9430-41f6-836e-7463a94ffada req-a378361f-f708-46a7-95af-6980cda4d288 service nova] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Received event network-changed-93510b2a-9566-44fe-abfd-c6442a5d8e51 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 2454.341740] env[61649]: DEBUG nova.compute.manager [req-7a86d99f-9430-41f6-836e-7463a94ffada req-a378361f-f708-46a7-95af-6980cda4d288 service nova] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Refreshing instance network info cache due to event network-changed-93510b2a-9566-44fe-abfd-c6442a5d8e51. 
{{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 2454.341904] env[61649]: DEBUG oslo_concurrency.lockutils [req-7a86d99f-9430-41f6-836e-7463a94ffada req-a378361f-f708-46a7-95af-6980cda4d288 service nova] Acquiring lock "refresh_cache-574eac86-ac01-40f0-9e89-aa1fff108353" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2454.342050] env[61649]: DEBUG oslo_concurrency.lockutils [req-7a86d99f-9430-41f6-836e-7463a94ffada req-a378361f-f708-46a7-95af-6980cda4d288 service nova] Acquired lock "refresh_cache-574eac86-ac01-40f0-9e89-aa1fff108353" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2454.342214] env[61649]: DEBUG nova.network.neutron [req-7a86d99f-9430-41f6-836e-7463a94ffada req-a378361f-f708-46a7-95af-6980cda4d288 service nova] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Refreshing network info cache for port 93510b2a-9566-44fe-abfd-c6442a5d8e51 {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 2454.342704] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-7a86d99f-9430-41f6-836e-7463a94ffada req-a378361f-f708-46a7-95af-6980cda4d288 service nova] Expecting reply to msg bf524a644361477f99ecdb76dd913558 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2454.349388] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bf524a644361477f99ecdb76dd913558 [ 2454.554259] env[61649]: DEBUG nova.network.neutron [req-7a86d99f-9430-41f6-836e-7463a94ffada req-a378361f-f708-46a7-95af-6980cda4d288 service nova] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Updated VIF entry in instance network info cache for port 93510b2a-9566-44fe-abfd-c6442a5d8e51. 
{{(pid=61649) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 2454.554624] env[61649]: DEBUG nova.network.neutron [req-7a86d99f-9430-41f6-836e-7463a94ffada req-a378361f-f708-46a7-95af-6980cda4d288 service nova] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Updating instance_info_cache with network_info: [{"id": "93510b2a-9566-44fe-abfd-c6442a5d8e51", "address": "fa:16:3e:a8:33:f5", "network": {"id": "8c8e2f95-e820-404a-a1c5-63f49f27fa23", "bridge": "br-int", "label": "tempest-ServersTestJSON-1518117450-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72501ae7a7dd4f85801c096912a5af36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "158692b5-b9fb-49e8-9903-e742ffd6c168", "external-id": "nsx-vlan-transportzone-769", "segmentation_id": 769, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93510b2a-95", "ovs_interfaceid": "93510b2a-9566-44fe-abfd-c6442a5d8e51", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2454.555124] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-7a86d99f-9430-41f6-836e-7463a94ffada req-a378361f-f708-46a7-95af-6980cda4d288 service nova] Expecting reply to msg cc3923091dc64147a9c2cbdee26f9990 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2454.563555] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cc3923091dc64147a9c2cbdee26f9990 [ 2454.564179] env[61649]: DEBUG oslo_concurrency.lockutils [req-7a86d99f-9430-41f6-836e-7463a94ffada req-a378361f-f708-46a7-95af-6980cda4d288 service nova] Releasing lock "refresh_cache-574eac86-ac01-40f0-9e89-aa1fff108353" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2461.396897] env[61649]: WARNING oslo_vmware.rw_handles [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2461.396897] env[61649]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2461.396897] env[61649]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2461.396897] env[61649]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2461.396897] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2461.396897] env[61649]: ERROR oslo_vmware.rw_handles response.begin() [ 2461.396897] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2461.396897] env[61649]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2461.396897] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 
2461.396897] env[61649]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2461.396897] env[61649]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2461.396897] env[61649]: ERROR oslo_vmware.rw_handles [ 2461.397526] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Downloaded image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to vmware_temp/88835c2d-3e32-4af7-abca-6de0fbe172eb/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2461.399241] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Caching image {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2461.399486] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Copying Virtual Disk [datastore1] vmware_temp/88835c2d-3e32-4af7-abca-6de0fbe172eb/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk to [datastore1] vmware_temp/88835c2d-3e32-4af7-abca-6de0fbe172eb/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk {{(pid=61649) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2461.399774] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-19947b0a-09d4-47c4-9980-07b5385d15db {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2461.409656] env[61649]: DEBUG oslo_vmware.api [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Waiting for the task: (returnval){ [ 2461.409656] env[61649]: value = "task-158329" [ 2461.409656] env[61649]: _type = "Task" [ 2461.409656] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2461.417685] env[61649]: DEBUG oslo_vmware.api [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Task: {'id': task-158329, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2461.919246] env[61649]: DEBUG oslo_vmware.exceptions [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Fault InvalidArgument not matched. 
{{(pid=61649) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2461.919837] env[61649]: DEBUG oslo_concurrency.lockutils [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2461.920551] env[61649]: ERROR nova.compute.manager [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2461.920551] env[61649]: Faults: ['InvalidArgument'] [ 2461.920551] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Traceback (most recent call last): [ 2461.920551] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2461.920551] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] yield resources [ 2461.920551] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2461.920551] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] self.driver.spawn(context, instance, image_meta, [ 2461.920551] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2461.920551] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2461.920551] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2461.920551] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] self._fetch_image_if_missing(context, vi) [ 2461.920551] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2461.920952] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] image_cache(vi, tmp_image_ds_loc) [ 2461.920952] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2461.920952] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] vm_util.copy_virtual_disk( [ 2461.920952] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2461.920952] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] session._wait_for_task(vmdk_copy_task) [ 2461.920952] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2461.920952] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] return self.wait_for_task(task_ref) [ 2461.920952] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2461.920952] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] return evt.wait() [ 2461.920952] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2461.920952] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] result = hub.switch() [ 2461.920952] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2461.920952] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] return self.greenlet.switch() [ 2461.921316] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2461.921316] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] self.f(*self.args, **self.kw) [ 2461.921316] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2461.921316] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] raise exceptions.translate_fault(task_info.error) [ 2461.921316] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2461.921316] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Faults: ['InvalidArgument'] [ 2461.921316] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] [ 2461.921498] env[61649]: INFO nova.compute.manager [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Terminating instance [ 2461.923037] env[61649]: DEBUG oslo_concurrency.lockutils [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2461.923245] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2461.923481] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8b0d8fb8-5642-4b58-8621-fa45d0ca1c11 {{(pid=61649) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2461.925753] env[61649]: DEBUG nova.compute.manager [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2461.925950] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2461.926659] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8000b098-4ec8-481e-929b-f78aea07e717 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2461.933247] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Unregistering the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2461.933534] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b08e4690-b98d-48d3-aa61-7997ccfa7524 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2461.935601] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2461.935772] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61649) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2461.936740] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4fa919e8-58d4-44a5-b541-6062a28eeb54 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2461.941536] env[61649]: DEBUG oslo_vmware.api [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Waiting for the task: (returnval){ [ 2461.941536] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52b3a1b2-4455-3212-3d9b-94a4a4344f85" [ 2461.941536] env[61649]: _type = "Task" [ 2461.941536] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2461.948370] env[61649]: DEBUG oslo_vmware.api [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52b3a1b2-4455-3212-3d9b-94a4a4344f85, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2461.998267] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Unregistered the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2461.998536] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Deleting contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2461.998719] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Deleting the datastore file [datastore1] 8295f484-2065-4a21-bdec-7d38e98f93e7 {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2461.998972] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3b879646-ed65-4d18-8961-c5c42b9d77ba {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2462.004339] env[61649]: DEBUG oslo_vmware.api [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Waiting for the task: (returnval){ [ 2462.004339] env[61649]: value = "task-158331" [ 2462.004339] env[61649]: _type = "Task" [ 2462.004339] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2462.013228] env[61649]: DEBUG oslo_vmware.api [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Task: {'id': task-158331, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2462.451479] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Preparing fetch location {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2462.451831] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Creating directory with path [datastore1] vmware_temp/2f8dc3aa-3d6c-4972-bbc8-47dbfa08cd92/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2462.451973] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-90f3db21-8304-4837-8d85-4dc05b90a635 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2462.463497] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Created directory with path [datastore1] vmware_temp/2f8dc3aa-3d6c-4972-bbc8-47dbfa08cd92/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2462.463678] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Fetch image to [datastore1] vmware_temp/2f8dc3aa-3d6c-4972-bbc8-47dbfa08cd92/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2462.463843] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to [datastore1] vmware_temp/2f8dc3aa-3d6c-4972-bbc8-47dbfa08cd92/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2462.464577] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1690e801-10af-4b8a-97cd-7e6f56d5c243 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2462.470768] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-735e0f76-6c94-48f4-893e-8c5d4bc71e7c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2462.479656] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a99cc947-c351-432d-b155-135c9df96d14 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2462.511929] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c79da210-5999-4fd5-b901-b194ec1934e1 {{(pid=61649) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2462.519013] env[61649]: DEBUG oslo_vmware.api [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Task: {'id': task-158331, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078225} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2462.520422] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Deleted the datastore file {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2462.520619] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Deleted contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2462.520796] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2462.520971] env[61649]: INFO nova.compute.manager [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Took 0.60 seconds to destroy the instance on the hypervisor. 
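Each of the vCenter operations traced above (CreateVM_Task, SearchDatastore_Task, DeleteDatastoreFile_Task, and the CopyVirtualDisk_Task that follows) runs the same way: the service invokes the call, gets back a task reference, and wait_for_task polls TaskInfo until it reports success or a fault, which is what produces the repeated "progress is 0%." lines and the closing "completed successfully" entry with duration_secs. A minimal sketch of that polling loop, standard library only; fetch_task_info and TaskFailed are hypothetical stand-ins, not the actual oslo.vmware API:

    import time

    POLL_INTERVAL = 0.5  # assumed fixed poll interval


    class TaskFailed(Exception):
        """Stand-in for the fault raised when TaskInfo reports an error
        (compare the VimFaultException tracebacks later in this log)."""


    def wait_for_task(fetch_task_info, task_id, timeout=300.0):
        """Poll a vCenter-style task until it completes or fails.

        fetch_task_info(task_id) is assumed to return a dict such as
        {'state': 'running'|'success'|'error', 'progress': int,
         'error': str or None}, mirroring the TaskInfo fields logged above.
        Returns the elapsed time on success (the 'duration_secs' value).
        """
        start = time.monotonic()
        deadline = start + timeout
        while time.monotonic() < deadline:
            info = fetch_task_info(task_id)
            if info['state'] == 'success':
                return time.monotonic() - start
            if info['state'] == 'error':
                raise TaskFailed(info.get('error') or 'unknown fault')
            # Matches the "progress is N%." debug lines above.
            print(f"Task {task_id} progress is {info.get('progress', 0)}%.")
            time.sleep(POLL_INTERVAL)
        raise TimeoutError(f"task {task_id} did not complete in {timeout}s")


    # Fake task source that succeeds on the third poll:
    states = iter([{'state': 'running', 'progress': 0},
                   {'state': 'running', 'progress': 50},
                   {'state': 'success', 'progress': 100}])
    duration = wait_for_task(lambda _tid: next(states), 'task-158328')
    print(f"CreateVM_Task completed successfully in {duration:.3f}s")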
[ 2462.522713] env[61649]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-cf9a066d-5f53-4714-aac1-96ffc2a80d87 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2462.524621] env[61649]: DEBUG nova.compute.claims [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Aborting claim: {{(pid=61649) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 2462.524793] env[61649]: DEBUG oslo_concurrency.lockutils [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2462.525004] env[61649]: DEBUG oslo_concurrency.lockutils [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2462.526916] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg 079990f494664503bb1ee479756c4f6c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2462.547533] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2462.563053] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 079990f494664503bb1ee479756c4f6c [ 2462.596314] env[61649]: DEBUG oslo_vmware.rw_handles [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2f8dc3aa-3d6c-4972-bbc8-47dbfa08cd92/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2462.656307] env[61649]: DEBUG oslo_vmware.rw_handles [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Completed reading data from the image iterator. 
{{(pid=61649) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2462.656659] env[61649]: DEBUG oslo_vmware.rw_handles [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2f8dc3aa-3d6c-4972-bbc8-47dbfa08cd92/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2462.697794] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d45eedf2-1208-40d3-9b64-da9223206d0e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2462.705604] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e8d8274-ae96-4810-81af-1c3983c554e8 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2462.735902] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd686185-cda4-4c4c-b172-113f8f5ba04e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2462.742727] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a88a5ea5-a421-4708-8154-ee74cd7095b0 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2462.755346] env[61649]: DEBUG nova.compute.provider_tree [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2462.755946] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg 0bcd3f35838e473195c6401d8dcc93ff in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2462.763595] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0bcd3f35838e473195c6401d8dcc93ff [ 2462.764541] env[61649]: DEBUG nova.scheduler.client.report [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2462.766803] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg 
2bd60c6e22df4267b234e4aa70ca82ff in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2462.777018] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2bd60c6e22df4267b234e4aa70ca82ff [ 2462.777832] env[61649]: DEBUG oslo_concurrency.lockutils [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.253s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2462.779184] env[61649]: ERROR nova.compute.manager [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2462.779184] env[61649]: Faults: ['InvalidArgument'] [ 2462.779184] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Traceback (most recent call last): [ 2462.779184] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2462.779184] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] self.driver.spawn(context, instance, image_meta, [ 2462.779184] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2462.779184] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2462.779184] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2462.779184] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] self._fetch_image_if_missing(context, vi) [ 2462.779184] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2462.779184] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] image_cache(vi, tmp_image_ds_loc) [ 2462.779184] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2462.779571] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] vm_util.copy_virtual_disk( [ 2462.779571] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2462.779571] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] session._wait_for_task(vmdk_copy_task) [ 2462.779571] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2462.779571] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] return self.wait_for_task(task_ref) [ 2462.779571] 
env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2462.779571] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] return evt.wait() [ 2462.779571] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2462.779571] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] result = hub.switch() [ 2462.779571] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2462.779571] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] return self.greenlet.switch() [ 2462.779571] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2462.779571] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] self.f(*self.args, **self.kw) [ 2462.779979] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2462.779979] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] raise exceptions.translate_fault(task_info.error) [ 2462.779979] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2462.779979] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Faults: ['InvalidArgument'] [ 2462.779979] env[61649]: ERROR nova.compute.manager [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] [ 2462.779979] env[61649]: DEBUG nova.compute.utils [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] VimFaultException {{(pid=61649) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2462.781289] env[61649]: DEBUG nova.compute.manager [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Build of instance 8295f484-2065-4a21-bdec-7d38e98f93e7 was re-scheduled: A specified parameter was not correct: fileType [ 2462.781289] env[61649]: Faults: ['InvalidArgument'] {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2462.781672] env[61649]: DEBUG nova.compute.manager [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Unplugging VIFs for instance {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2462.781847] env[61649]: DEBUG nova.compute.manager [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Virt 
driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2462.782009] env[61649]: DEBUG nova.compute.manager [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2462.782192] env[61649]: DEBUG nova.network.neutron [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2463.004922] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg 7c81f9e56b254880ba135e36e37ea5d5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2463.012721] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7c81f9e56b254880ba135e36e37ea5d5 [ 2463.013267] env[61649]: DEBUG nova.network.neutron [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2463.013723] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg 6120ec167eee413c9a2fe02e894eed1d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2463.026208] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6120ec167eee413c9a2fe02e894eed1d [ 2463.026912] env[61649]: INFO nova.compute.manager [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Took 0.24 seconds to deallocate network for instance. 
[ 2463.028634] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg b54526a1f670435b94535c5ad151faca in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2463.058746] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b54526a1f670435b94535c5ad151faca [ 2463.061615] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg 7859e833fc0d45adb15a241013f187a9 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2463.091354] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7859e833fc0d45adb15a241013f187a9 [ 2463.110024] env[61649]: INFO nova.scheduler.client.report [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Deleted allocations for instance 8295f484-2065-4a21-bdec-7d38e98f93e7 [ 2463.116500] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg 70bbb878bbfc40f3b4066802ef44c3c7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2463.126333] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 70bbb878bbfc40f3b4066802ef44c3c7 [ 2463.127019] env[61649]: DEBUG oslo_concurrency.lockutils [None req-83e64131-f514-4104-be92-16d6c9c5a97d tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Lock "8295f484-2065-4a21-bdec-7d38e98f93e7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 489.830s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2463.127440] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3362cbe3-449d-48dc-bf26-bebd0d418f3c tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Lock "8295f484-2065-4a21-bdec-7d38e98f93e7" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 293.314s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2463.127846] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3362cbe3-449d-48dc-bf26-bebd0d418f3c tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Acquiring lock "8295f484-2065-4a21-bdec-7d38e98f93e7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2463.128226] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3362cbe3-449d-48dc-bf26-bebd0d418f3c tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Lock "8295f484-2065-4a21-bdec-7d38e98f93e7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2463.128564] 
env[61649]: DEBUG oslo_concurrency.lockutils [None req-3362cbe3-449d-48dc-bf26-bebd0d418f3c tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Lock "8295f484-2065-4a21-bdec-7d38e98f93e7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2463.130571] env[61649]: INFO nova.compute.manager [None req-3362cbe3-449d-48dc-bf26-bebd0d418f3c tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Terminating instance [ 2463.132348] env[61649]: DEBUG nova.compute.manager [None req-3362cbe3-449d-48dc-bf26-bebd0d418f3c tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2463.132684] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-3362cbe3-449d-48dc-bf26-bebd0d418f3c tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2463.133272] env[61649]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-418a8187-ec1f-48ec-bf0f-398547d7a26d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2463.142569] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd2639f3-b8e6-4025-bb07-147245e4a2f3 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2463.169971] env[61649]: WARNING nova.virt.vmwareapi.vmops [None req-3362cbe3-449d-48dc-bf26-bebd0d418f3c tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8295f484-2065-4a21-bdec-7d38e98f93e7 could not be found. [ 2463.170333] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-3362cbe3-449d-48dc-bf26-bebd0d418f3c tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2463.170629] env[61649]: INFO nova.compute.manager [None req-3362cbe3-449d-48dc-bf26-bebd0d418f3c tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2463.171039] env[61649]: DEBUG oslo.service.loopingcall [None req-3362cbe3-449d-48dc-bf26-bebd0d418f3c tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2463.171356] env[61649]: DEBUG nova.compute.manager [-] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2463.171549] env[61649]: DEBUG nova.network.neutron [-] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2463.190633] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg bf6c379b06d04e2cbdec2c074b314d13 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2463.197144] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bf6c379b06d04e2cbdec2c074b314d13 [ 2463.197675] env[61649]: DEBUG nova.network.neutron [-] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2463.198204] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 510a3162327b420cb8d05b88577805ff in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2463.205975] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 510a3162327b420cb8d05b88577805ff [ 2463.206526] env[61649]: INFO nova.compute.manager [-] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] Took 0.03 seconds to deallocate network for instance. [ 2463.210227] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-3362cbe3-449d-48dc-bf26-bebd0d418f3c tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg 8ead540679e94c83b83f32dbc26406b3 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2463.237611] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8ead540679e94c83b83f32dbc26406b3 [ 2463.252922] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-3362cbe3-449d-48dc-bf26-bebd0d418f3c tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg 1acab1e2732e4277a6f8073f632c2177 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2463.286954] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1acab1e2732e4277a6f8073f632c2177 [ 2463.290089] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3362cbe3-449d-48dc-bf26-bebd0d418f3c tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Lock "8295f484-2065-4a21-bdec-7d38e98f93e7" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.163s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2463.290410] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-3362cbe3-449d-48dc-bf26-bebd0d418f3c tempest-AttachVolumeNegativeTest-227144255 tempest-AttachVolumeNegativeTest-227144255-project-member] Expecting reply to msg 45b2f8c653da403f974fee7b4ef62b9a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2463.291655] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "8295f484-2065-4a21-bdec-7d38e98f93e7" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 
121.278s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2463.291853] env[61649]: INFO nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 8295f484-2065-4a21-bdec-7d38e98f93e7] During sync_power_state the instance has a pending task (deleting). Skip. [ 2463.292049] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "8295f484-2065-4a21-bdec-7d38e98f93e7" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2463.306273] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 45b2f8c653da403f974fee7b4ef62b9a [ 2464.928697] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2464.929299] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61649) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 2465.929854] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2467.929201] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2471.924529] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2472.929057] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2474.929245] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2474.929670] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Starting heal instance info cache {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 2474.929670] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Rebuilding the list of instances to heal {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 2474.930158] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 
696959e4be884ceb9ce267120bd2cf04 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2474.943425] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 696959e4be884ceb9ce267120bd2cf04 [ 2474.945036] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2474.945187] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2474.945319] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2474.945445] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2474.945566] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2474.945687] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Didn't find any instances for network info cache update. 
{{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 2474.946186] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2474.946508] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 9941114fbbd5495c8025818f6afc11ba in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2474.954622] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9941114fbbd5495c8025818f6afc11ba [ 2474.955437] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2474.955648] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2474.955806] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2474.955950] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61649) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2474.957029] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75b05580-fc77-4126-ab4a-1834d86e0155 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2474.965742] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0022a393-d7b0-4984-87e4-bf4bafb45adf {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2474.979109] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e83092c-be37-4930-8e20-dd6eb06996ff {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2474.985869] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2994271e-f805-4bba-8d3f-296d1f25ecf9 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2475.014095] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181834MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61649) _report_hypervisor_resource_view 
/opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2475.014240] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2475.014434] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2475.015275] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 2cc4325c51e44ed7ac9007118fa81396 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2475.036651] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2cc4325c51e44ed7ac9007118fa81396 [ 2475.039080] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 560f440472d44d87a445f87ddd6d5cd6 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2475.047096] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 560f440472d44d87a445f87ddd6d5cd6 [ 2475.062077] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance da4cbfc3-cf43-4cf6-b391-d7183699e58d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2475.062221] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 56652181-0379-4532-9b2a-e6138cbd73ad actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2475.062349] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 09dcd3bd-1baa-4276-b8c5-64de3de036f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2475.062613] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance ed66fa83-b203-4c7a-b1e5-d00547fa46c9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2475.062613] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 574eac86-ac01-40f0-9e89-aa1fff108353 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2475.062788] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2475.062888] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1152MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2475.127145] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8de6c178-c673-4a51-a70e-20e7941d0e18 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2475.134215] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa8a3fd4-4000-418f-9895-12e4584e7730 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2475.163026] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e89991d-3db4-41c7-b3f2-06403583639a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2475.169599] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1489abbb-bc45-4cdd-9687-66ae9dbb0a5e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2475.182972] env[61649]: DEBUG nova.compute.provider_tree [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2475.183379] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg ff71aa7e524941498236e777fabc532d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2475.190110] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ff71aa7e524941498236e777fabc532d [ 2475.190925] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2475.193106] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg baf318f0b6924bc8bc178755a64c15a3 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2475.203074] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg baf318f0b6924bc8bc178755a64c15a3 [ 
2475.203657] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61649) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2475.203830] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.189s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2476.187340] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2476.929347] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2481.925535] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2481.926188] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 65a1b0e1bf0845aa8f2cbe97c27cba8c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2481.939111] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 65a1b0e1bf0845aa8f2cbe97c27cba8c [ 2493.911983] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d7e25885-e3c0-4257-8a82-3d301c5a1bbb tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Expecting reply to msg 6c0340b53f5e44dfaf2d687d9f200043 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2493.921782] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6c0340b53f5e44dfaf2d687d9f200043 [ 2493.921782] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d7e25885-e3c0-4257-8a82-3d301c5a1bbb tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Acquiring lock "ed66fa83-b203-4c7a-b1e5-d00547fa46c9" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2510.463456] env[61649]: WARNING oslo_vmware.rw_handles [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2510.463456] env[61649]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2510.463456] env[61649]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2510.463456] env[61649]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2510.463456] env[61649]: ERROR oslo_vmware.rw_handles File 
"/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2510.463456] env[61649]: ERROR oslo_vmware.rw_handles response.begin() [ 2510.463456] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2510.463456] env[61649]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2510.463456] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2510.463456] env[61649]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2510.463456] env[61649]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2510.463456] env[61649]: ERROR oslo_vmware.rw_handles [ 2510.464381] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Downloaded image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to vmware_temp/2f8dc3aa-3d6c-4972-bbc8-47dbfa08cd92/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2510.465677] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Caching image {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2510.465922] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Copying Virtual Disk [datastore1] vmware_temp/2f8dc3aa-3d6c-4972-bbc8-47dbfa08cd92/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk to [datastore1] vmware_temp/2f8dc3aa-3d6c-4972-bbc8-47dbfa08cd92/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk {{(pid=61649) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2510.466202] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fad52c03-4553-47b2-9d4b-ab99c43fc158 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2510.474338] env[61649]: DEBUG oslo_vmware.api [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Waiting for the task: (returnval){ [ 2510.474338] env[61649]: value = "task-158332" [ 2510.474338] env[61649]: _type = "Task" [ 2510.474338] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2510.482068] env[61649]: DEBUG oslo_vmware.api [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Task: {'id': task-158332, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2510.985029] env[61649]: DEBUG oslo_vmware.exceptions [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Fault InvalidArgument not matched. 
{{(pid=61649) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2510.985592] env[61649]: DEBUG oslo_concurrency.lockutils [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2510.986322] env[61649]: ERROR nova.compute.manager [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2510.986322] env[61649]: Faults: ['InvalidArgument'] [ 2510.986322] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Traceback (most recent call last): [ 2510.986322] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2510.986322] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] yield resources [ 2510.986322] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2510.986322] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] self.driver.spawn(context, instance, image_meta, [ 2510.986322] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2510.986322] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2510.986322] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2510.986322] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] self._fetch_image_if_missing(context, vi) [ 2510.986322] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2510.986698] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] image_cache(vi, tmp_image_ds_loc) [ 2510.986698] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2510.986698] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] vm_util.copy_virtual_disk( [ 2510.986698] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2510.986698] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] session._wait_for_task(vmdk_copy_task) [ 2510.986698] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 2510.986698] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] return self.wait_for_task(task_ref) [ 2510.986698] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2510.986698] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] return evt.wait() [ 2510.986698] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2510.986698] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] result = hub.switch() [ 2510.986698] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2510.986698] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] return self.greenlet.switch() [ 2510.987063] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2510.987063] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] self.f(*self.args, **self.kw) [ 2510.987063] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2510.987063] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] raise exceptions.translate_fault(task_info.error) [ 2510.987063] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2510.987063] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Faults: ['InvalidArgument'] [ 2510.987063] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] [ 2510.987658] env[61649]: INFO nova.compute.manager [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Terminating instance [ 2510.989423] env[61649]: DEBUG oslo_concurrency.lockutils [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2510.989634] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2510.989907] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-db1a6ed5-285c-422b-b8cd-e709cbabc52e {{(pid=61649) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2510.992062] env[61649]: DEBUG nova.compute.manager [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2510.992262] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2510.992982] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4628ba30-80c8-4c4d-8390-bc29f15f53a6 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2510.999759] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Unregistering the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2511.000054] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bafbc588-9521-4481-9913-f6431ea03287 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2511.002090] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2511.002265] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61649) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2511.003256] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dfcc6cb1-d2af-4660-b899-443e658418ae {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2511.007936] env[61649]: DEBUG oslo_vmware.api [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Waiting for the task: (returnval){ [ 2511.007936] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]5288eae6-0d38-bf5b-2f08-471f453d8c93" [ 2511.007936] env[61649]: _type = "Task" [ 2511.007936] env[61649]: } to complete. 
{{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2511.021887] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Preparing fetch location {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2511.022113] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Creating directory with path [datastore1] vmware_temp/791636a4-97c2-46dd-93d2-6d94ad29c755/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2511.022374] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4129b9f0-a9c6-4ad2-957a-115c49e3b423 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2511.042197] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Created directory with path [datastore1] vmware_temp/791636a4-97c2-46dd-93d2-6d94ad29c755/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2511.042444] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Fetch image to [datastore1] vmware_temp/791636a4-97c2-46dd-93d2-6d94ad29c755/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2511.042567] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to [datastore1] vmware_temp/791636a4-97c2-46dd-93d2-6d94ad29c755/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2511.043261] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7719506-c5a8-48ca-8080-c08cbcd489c9 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2511.049544] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c87f83c-8824-45b2-bed5-77aea27a5f57 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2511.058042] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb03b34a-e5a2-4bdc-a8e4-9491c078f71d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2511.089668] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-028eb10c-d6e1-48a4-ad35-0aeee63c3ef4 {{(pid=61649) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2511.092051] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Unregistered the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2511.092253] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Deleting contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2511.092428] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Deleting the datastore file [datastore1] da4cbfc3-cf43-4cf6-b391-d7183699e58d {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2511.092649] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cb7a1049-4c99-4c24-a651-b26e398a1a0c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2511.097026] env[61649]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-0127ed52-7fb9-4ae0-8ea8-e6703804f8af {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2511.099576] env[61649]: DEBUG oslo_vmware.api [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Waiting for the task: (returnval){ [ 2511.099576] env[61649]: value = "task-158334" [ 2511.099576] env[61649]: _type = "Task" [ 2511.099576] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2511.106643] env[61649]: DEBUG oslo_vmware.api [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Task: {'id': task-158334, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2511.122935] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2511.175578] env[61649]: DEBUG oslo_vmware.rw_handles [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/791636a4-97c2-46dd-93d2-6d94ad29c755/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61649) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2511.237629] env[61649]: DEBUG oslo_vmware.rw_handles [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Completed reading data from the image iterator. {{(pid=61649) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2511.237825] env[61649]: DEBUG oslo_vmware.rw_handles [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/791636a4-97c2-46dd-93d2-6d94ad29c755/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2511.609221] env[61649]: DEBUG oslo_vmware.api [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Task: {'id': task-158334, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.068328} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2511.609563] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Deleted the datastore file {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2511.609750] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Deleted contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2511.609962] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2511.610150] env[61649]: INFO nova.compute.manager [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Took 0.62 seconds to destroy the instance on the hypervisor. 
[ 2511.612275] env[61649]: DEBUG nova.compute.claims [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Aborting claim: {{(pid=61649) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 2511.612455] env[61649]: DEBUG oslo_concurrency.lockutils [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2511.612668] env[61649]: DEBUG oslo_concurrency.lockutils [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2511.614489] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg eb484e7f75054d55a611c226f07aeebf in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2511.643785] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eb484e7f75054d55a611c226f07aeebf [ 2511.710871] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2dec872-79a3-4f4f-a14a-c2c29ede1a67 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2511.718277] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1618c1b-25cf-4fdb-8205-ba7ec19f6284 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2511.747230] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c524e33-d878-4749-8183-a0457fc8a092 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2511.754622] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cd35a00-5484-4941-9d20-eda260a50656 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2511.768267] env[61649]: DEBUG nova.compute.provider_tree [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2511.768824] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 756613019c184c8bbb93ce6fb1267942 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2511.775988] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 756613019c184c8bbb93ce6fb1267942 [ 2511.776855] env[61649]: DEBUG nova.scheduler.client.report [None 
req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2511.779068] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 7db021c5a5eb47f485b67841893d4fca in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2511.789050] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7db021c5a5eb47f485b67841893d4fca [ 2511.789739] env[61649]: DEBUG oslo_concurrency.lockutils [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.177s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2511.790287] env[61649]: ERROR nova.compute.manager [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2511.790287] env[61649]: Faults: ['InvalidArgument'] [ 2511.790287] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Traceback (most recent call last): [ 2511.790287] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2511.790287] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] self.driver.spawn(context, instance, image_meta, [ 2511.790287] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2511.790287] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2511.790287] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2511.790287] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] self._fetch_image_if_missing(context, vi) [ 2511.790287] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2511.790287] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] image_cache(vi, tmp_image_ds_loc) [ 2511.790287] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2511.790603] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] vm_util.copy_virtual_disk( [ 2511.790603] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2511.790603] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] session._wait_for_task(vmdk_copy_task) [ 2511.790603] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2511.790603] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] return self.wait_for_task(task_ref) [ 2511.790603] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2511.790603] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] return evt.wait() [ 2511.790603] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2511.790603] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] result = hub.switch() [ 2511.790603] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2511.790603] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] return self.greenlet.switch() [ 2511.790603] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2511.790603] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] self.f(*self.args, **self.kw) [ 2511.790909] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2511.790909] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] raise exceptions.translate_fault(task_info.error) [ 2511.790909] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2511.790909] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Faults: ['InvalidArgument'] [ 2511.790909] env[61649]: ERROR nova.compute.manager [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] [ 2511.791038] env[61649]: DEBUG nova.compute.utils [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] VimFaultException {{(pid=61649) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2511.792381] env[61649]: DEBUG nova.compute.manager [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 
da4cbfc3-cf43-4cf6-b391-d7183699e58d] Build of instance da4cbfc3-cf43-4cf6-b391-d7183699e58d was re-scheduled: A specified parameter was not correct: fileType [ 2511.792381] env[61649]: Faults: ['InvalidArgument'] {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2511.792754] env[61649]: DEBUG nova.compute.manager [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Unplugging VIFs for instance {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2511.792929] env[61649]: DEBUG nova.compute.manager [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2511.793102] env[61649]: DEBUG nova.compute.manager [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2511.793315] env[61649]: DEBUG nova.network.neutron [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2512.060736] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg fca7225709bd467e83cc93839e2b0fb8 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2512.068223] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fca7225709bd467e83cc93839e2b0fb8 [ 2512.069023] env[61649]: DEBUG nova.network.neutron [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2512.069741] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 2a3e85ccf4de4db7ae287c98d97f7ebb in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2512.078461] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2a3e85ccf4de4db7ae287c98d97f7ebb [ 2512.079024] env[61649]: INFO nova.compute.manager [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Took 0.29 seconds to deallocate network for instance. 
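The inventory payload reported above for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 determines the schedulable ceiling placement enforces: (total - reserved) * allocation_ratio per resource class. A short worked sketch using the exact numbers from this log; the dict literal is trimmed from the report entries above, everything else is plain arithmetic:

# Capacity implied by the inventory reported for provider
# dad32f24-3843-462d-a3f9-4ef2a60037c4 in this log. Placement checks
# used + requested <= (total - reserved) * allocation_ratio.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    ceiling = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: {ceiling:g} schedulable")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400 -- which is why the four
# 1-vCPU / 128 MB m1.nano allocations tracked later in this log barely
# dent the provider.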
[ 2512.080798] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 3fd410425d144c3fb2a276ee38cc94dd in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2512.111030] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3fd410425d144c3fb2a276ee38cc94dd [ 2512.113632] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 8905f04cb3b84e47975830acd4f493c1 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2512.142820] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8905f04cb3b84e47975830acd4f493c1 [ 2512.161885] env[61649]: INFO nova.scheduler.client.report [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Deleted allocations for instance da4cbfc3-cf43-4cf6-b391-d7183699e58d [ 2512.167706] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 5bb6fe3924654cafa589c8f78f4b2400 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2512.177158] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5bb6fe3924654cafa589c8f78f4b2400 [ 2512.177659] env[61649]: DEBUG oslo_concurrency.lockutils [None req-81b224f4-d4a9-4540-8dc5-59a4b5b4b9e0 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Lock "da4cbfc3-cf43-4cf6-b391-d7183699e58d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 452.936s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2512.177923] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3b75c89f-54f7-4b36-967d-8edd88b1295f tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Lock "da4cbfc3-cf43-4cf6-b391-d7183699e58d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 257.981s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2512.178157] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3b75c89f-54f7-4b36-967d-8edd88b1295f tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquiring lock "da4cbfc3-cf43-4cf6-b391-d7183699e58d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2512.178363] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3b75c89f-54f7-4b36-967d-8edd88b1295f tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Lock "da4cbfc3-cf43-4cf6-b391-d7183699e58d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2512.178527] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3b75c89f-54f7-4b36-967d-8edd88b1295f tempest-ServersTestJSON-1529792186 
tempest-ServersTestJSON-1529792186-project-member] Lock "da4cbfc3-cf43-4cf6-b391-d7183699e58d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2512.180406] env[61649]: INFO nova.compute.manager [None req-3b75c89f-54f7-4b36-967d-8edd88b1295f tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Terminating instance [ 2512.182129] env[61649]: DEBUG nova.compute.manager [None req-3b75c89f-54f7-4b36-967d-8edd88b1295f tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2512.182320] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-3b75c89f-54f7-4b36-967d-8edd88b1295f tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2512.182779] env[61649]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8cb97c4d-5365-46bf-9894-af0718d6e026 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2512.191635] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c35943ca-1b00-40b1-8d25-a8b8e31a7c0d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2512.216754] env[61649]: WARNING nova.virt.vmwareapi.vmops [None req-3b75c89f-54f7-4b36-967d-8edd88b1295f tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance da4cbfc3-cf43-4cf6-b391-d7183699e58d could not be found. [ 2512.216958] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-3b75c89f-54f7-4b36-967d-8edd88b1295f tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2512.217143] env[61649]: INFO nova.compute.manager [None req-3b75c89f-54f7-4b36-967d-8edd88b1295f tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Took 0.03 seconds to destroy the instance on the hypervisor. [ 2512.217388] env[61649]: DEBUG oslo.service.loopingcall [None req-3b75c89f-54f7-4b36-967d-8edd88b1295f tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2512.217617] env[61649]: DEBUG nova.compute.manager [-] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2512.217717] env[61649]: DEBUG nova.network.neutron [-] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2512.236825] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg c6dcf74113194d0c9b5b6922a2b3ea62 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2512.244933] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c6dcf74113194d0c9b5b6922a2b3ea62 [ 2512.245292] env[61649]: DEBUG nova.network.neutron [-] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2512.245667] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 18bd9527605d4a92ad3b17b2de0bdd55 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2512.253715] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 18bd9527605d4a92ad3b17b2de0bdd55 [ 2512.254165] env[61649]: INFO nova.compute.manager [-] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] Took 0.04 seconds to deallocate network for instance. [ 2512.258688] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-3b75c89f-54f7-4b36-967d-8edd88b1295f tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 467b56c4aff64c878c9b54757fbdc38b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2512.283276] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 467b56c4aff64c878c9b54757fbdc38b [ 2512.297579] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-3b75c89f-54f7-4b36-967d-8edd88b1295f tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 531a1053a217464a9f89fe4a97764703 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2512.331723] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 531a1053a217464a9f89fe4a97764703 [ 2512.334663] env[61649]: DEBUG oslo_concurrency.lockutils [None req-3b75c89f-54f7-4b36-967d-8edd88b1295f tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Lock "da4cbfc3-cf43-4cf6-b391-d7183699e58d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.157s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2512.334986] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-3b75c89f-54f7-4b36-967d-8edd88b1295f tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg a9dd495ec7bc40d78655bbb81dadcc32 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2512.335683] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "da4cbfc3-cf43-4cf6-b391-d7183699e58d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 170.322s {{(pid=61649) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2512.335871] env[61649]: INFO nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: da4cbfc3-cf43-4cf6-b391-d7183699e58d] During sync_power_state the instance has a pending task (deleting). Skip. [ 2512.336068] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "da4cbfc3-cf43-4cf6-b391-d7183699e58d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2512.343369] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a9dd495ec7bc40d78655bbb81dadcc32 [ 2524.930223] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2524.930542] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61649) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 2525.929801] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2527.928746] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2532.929573] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2533.924379] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2535.929053] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2535.929368] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Starting heal instance info cache {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 2535.929368] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Rebuilding the list of instances to heal {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 2535.929982] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg eb7d2a02919b498a8ccfb40a2dbcdadf in 
queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2535.942117] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eb7d2a02919b498a8ccfb40a2dbcdadf [ 2535.943521] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2535.943671] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2535.943802] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2535.943925] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2535.944068] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Didn't find any instances for network info cache update. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 2535.944543] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2535.944721] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2535.945026] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg a7060d8bf68746ad8a9fba102ac465a5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2535.954001] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a7060d8bf68746ad8a9fba102ac465a5 [ 2535.954846] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2535.955049] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2535.955208] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2535.955358] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61649) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2535.956422] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0533ec08-ecf3-4949-846e-938e588a00a7 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2535.965082] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a963fd09-c0b3-4d12-99f7-bc25ea0a74de {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2535.979801] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa559a55-d580-43ee-949d-6b368219aa55 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2535.985979] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4ffcfe9-ecde-498c-ab28-bc760d337d5e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2536.013919] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181832MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61649) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2536.014063] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2536.014249] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2536.015089] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg b5cb120b3c1e4b7b8128248e6ad431b5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2536.033847] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b5cb120b3c1e4b7b8128248e6ad431b5 [ 2536.035642] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg b76ab7f3b2dd46709d453920277952cf in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2536.044176] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b76ab7f3b2dd46709d453920277952cf [ 2536.059802] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 
56652181-0379-4532-9b2a-e6138cbd73ad actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2536.059971] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 09dcd3bd-1baa-4276-b8c5-64de3de036f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2536.060131] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance ed66fa83-b203-4c7a-b1e5-d00547fa46c9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2536.060259] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 574eac86-ac01-40f0-9e89-aa1fff108353 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2536.060456] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2536.060568] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1024MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2536.120503] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9b1ce99-5542-4a17-9088-25a56e7a75c5 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2536.127513] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9596bf66-f0d9-4bb7-b73a-096cc8e184c8 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2536.156411] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c37191d1-fe57-4933-ad65-bbc4ae452b0e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2536.164780] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a468149-66dc-4268-b1a6-61f6b6b2c5eb {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2536.180458] env[61649]: DEBUG nova.compute.provider_tree [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 
2536.180938] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 36b12a5ddf4743178928551690f05898 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2536.188117] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 36b12a5ddf4743178928551690f05898 [ 2536.189119] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2536.191402] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 97ef216dc4744c978b243e7aac115c36 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2536.202078] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 97ef216dc4744c978b243e7aac115c36 [ 2536.202738] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61649) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2536.202918] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.189s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2538.188547] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2541.745028] env[61649]: DEBUG oslo_concurrency.lockutils [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquiring lock "0a50d7c8-4079-431b-bffa-b9e95b3a4cef" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2541.745337] env[61649]: DEBUG oslo_concurrency.lockutils [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "0a50d7c8-4079-431b-bffa-b9e95b3a4cef" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2541.745699] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 8247488d80a54c529670b5de841bbb29 in 
queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2541.753619] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8247488d80a54c529670b5de841bbb29 [ 2541.754028] env[61649]: DEBUG nova.compute.manager [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2541.755618] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 16e7375f72754c8b830309a51b7e2635 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2541.784552] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 16e7375f72754c8b830309a51b7e2635 [ 2541.800028] env[61649]: DEBUG oslo_concurrency.lockutils [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2541.800028] env[61649]: DEBUG oslo_concurrency.lockutils [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2541.801469] env[61649]: INFO nova.compute.claims [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2541.802974] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 477e43767a464dfaa79e9273193a9385 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2541.833001] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 477e43767a464dfaa79e9273193a9385 [ 2541.834621] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 94883e7650ed4dfaa8fc5faae083ded0 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2541.841559] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 94883e7650ed4dfaa8fc5faae083ded0 [ 2541.907423] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4bf5038-d919-4030-b299-98e305905eb9 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2541.914771] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-347de767-0db5-4be9-b774-36447bb6a15c {{(pid=61649) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2541.944296] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1498109e-5b1e-49e3-8b6f-954a4669da8c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2541.950766] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9e034e6-19ea-4444-adef-a0e068d328b9 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2541.963083] env[61649]: DEBUG nova.compute.provider_tree [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2541.963533] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 877a55d7ea524119aa25e08db9eb0240 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2541.970710] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 877a55d7ea524119aa25e08db9eb0240 [ 2541.971577] env[61649]: DEBUG nova.scheduler.client.report [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2541.973676] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg ba55b0c5477d4afd9bb8780a97973702 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2541.983327] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ba55b0c5477d4afd9bb8780a97973702 [ 2541.983935] env[61649]: DEBUG oslo_concurrency.lockutils [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.184s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2541.984400] env[61649]: DEBUG nova.compute.manager [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Start building networks asynchronously for instance. 
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2541.986068] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 064fd22785574ec98746229942119de5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2542.015778] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 064fd22785574ec98746229942119de5 [ 2542.017198] env[61649]: DEBUG nova.compute.utils [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Using /dev/sd instead of None {{(pid=61649) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2542.017745] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg b93464ee40d34a7b9a5f667bdb374c7c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2542.018499] env[61649]: DEBUG nova.compute.manager [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Allocating IP information in the background. {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2542.018660] env[61649]: DEBUG nova.network.neutron [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] allocate_for_instance() {{(pid=61649) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2542.025486] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b93464ee40d34a7b9a5f667bdb374c7c [ 2542.026095] env[61649]: DEBUG nova.compute.manager [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Start building block device mappings for instance. 
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2542.027472] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg b12fd1d855144126ab1c25033a61ecfb in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2542.055863] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b12fd1d855144126ab1c25033a61ecfb [ 2542.058233] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg b1aee6407df54e7e89e46ff1b77ee44c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2542.085101] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b1aee6407df54e7e89e46ff1b77ee44c [ 2542.085852] env[61649]: DEBUG nova.compute.manager [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Start spawning the instance on the hypervisor. {{(pid=61649) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2542.089256] env[61649]: DEBUG nova.policy [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4eecfef918474dc8ad298d9eb189f56f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3939f446f6f04aa08a0b91101e55572b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61649) authorize /opt/stack/nova/nova/policy.py:203}} [ 2542.106547] env[61649]: DEBUG nova.virt.hardware [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-04-02T10:03:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-04-02T10:03:42Z,direct_url=,disk_format='vmdk',id=d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d9c1bd4c77004c3cb8e42232cad1896c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-04-02T10:03:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2542.106781] env[61649]: DEBUG nova.virt.hardware [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Flavor limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2542.106940] env[61649]: DEBUG nova.virt.hardware [None 
req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Image limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2542.107128] env[61649]: DEBUG nova.virt.hardware [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Flavor pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2542.107279] env[61649]: DEBUG nova.virt.hardware [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Image pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2542.107427] env[61649]: DEBUG nova.virt.hardware [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2542.107634] env[61649]: DEBUG nova.virt.hardware [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2542.107798] env[61649]: DEBUG nova.virt.hardware [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2542.107967] env[61649]: DEBUG nova.virt.hardware [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Got 1 possible topologies {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2542.108165] env[61649]: DEBUG nova.virt.hardware [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2542.108343] env[61649]: DEBUG nova.virt.hardware [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2542.109386] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10fac0d7-b546-4273-96a9-bf91df73b664 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2542.117982] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d68f3b50-9def-419a-abf1-b59b88359220 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2542.324362] env[61649]: DEBUG 
nova.network.neutron [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Successfully created port: f5c77cd5-4a8d-48c1-8dfe-4596a73a4519 {{(pid=61649) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2542.825823] env[61649]: DEBUG nova.compute.manager [req-ab36ae93-ba07-4878-b129-f1e8f6df296d req-5c67f8bd-1b9f-4479-9789-d4d5d686fb37 service nova] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Received event network-vif-plugged-f5c77cd5-4a8d-48c1-8dfe-4596a73a4519 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 2542.826099] env[61649]: DEBUG oslo_concurrency.lockutils [req-ab36ae93-ba07-4878-b129-f1e8f6df296d req-5c67f8bd-1b9f-4479-9789-d4d5d686fb37 service nova] Acquiring lock "0a50d7c8-4079-431b-bffa-b9e95b3a4cef-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2542.826329] env[61649]: DEBUG oslo_concurrency.lockutils [req-ab36ae93-ba07-4878-b129-f1e8f6df296d req-5c67f8bd-1b9f-4479-9789-d4d5d686fb37 service nova] Lock "0a50d7c8-4079-431b-bffa-b9e95b3a4cef-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2542.826508] env[61649]: DEBUG oslo_concurrency.lockutils [req-ab36ae93-ba07-4878-b129-f1e8f6df296d req-5c67f8bd-1b9f-4479-9789-d4d5d686fb37 service nova] Lock "0a50d7c8-4079-431b-bffa-b9e95b3a4cef-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2542.826680] env[61649]: DEBUG nova.compute.manager [req-ab36ae93-ba07-4878-b129-f1e8f6df296d req-5c67f8bd-1b9f-4479-9789-d4d5d686fb37 service nova] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] No waiting events found dispatching network-vif-plugged-f5c77cd5-4a8d-48c1-8dfe-4596a73a4519 {{(pid=61649) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2542.826847] env[61649]: WARNING nova.compute.manager [req-ab36ae93-ba07-4878-b129-f1e8f6df296d req-5c67f8bd-1b9f-4479-9789-d4d5d686fb37 service nova] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Received unexpected event network-vif-plugged-f5c77cd5-4a8d-48c1-8dfe-4596a73a4519 for instance with vm_state building and task_state spawning. 
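The instance_info_cache updates in this log carry the full Neutron network_info structure (see the cache update for instance 0a50d7c8-4079-431b-bffa-b9e95b3a4cef just below). A sketch that pulls the commonly needed fields (port id, MAC, VIF type, fixed IPs) out of one of these blobs; the sample dict is trimmed from that log entry and is not a complete VIF record:

# Extract the fields most often needed from a network_info VIF entry.
# Sample trimmed from the instance_info_cache update below for
# instance 0a50d7c8-4079-431b-bffa-b9e95b3a4cef.
network_info = [{
    "id": "f5c77cd5-4a8d-48c1-8dfe-4596a73a4519",
    "address": "fa:16:3e:01:26:f9",
    "type": "ovs",
    "devname": "tapf5c77cd5-4a",
    "network": {
        "id": "e70549f5-8c32-456f-9488-fdfff63b2dc8",
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.6", "type": "fixed"}],
        }],
    },
}]

for vif in network_info:
    # Flatten every fixed IP across all subnets on this port.
    ips = [ip["address"]
           for subnet in vif["network"]["subnets"]
           for ip in subnet["ips"]]
    print(vif["id"], vif["address"], vif["type"], ips)
# -> f5c77cd5-4a8d-48c1-8dfe-4596a73a4519 fa:16:3e:01:26:f9 ovs ['192.168.128.6']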
[ 2542.913222] env[61649]: DEBUG nova.network.neutron [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Successfully updated port: f5c77cd5-4a8d-48c1-8dfe-4596a73a4519 {{(pid=61649) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2542.913849] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg c8d35fdbc7024e4aa815b9c81776e5dc in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2542.921320] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c8d35fdbc7024e4aa815b9c81776e5dc [ 2542.921935] env[61649]: DEBUG oslo_concurrency.lockutils [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquiring lock "refresh_cache-0a50d7c8-4079-431b-bffa-b9e95b3a4cef" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2542.922070] env[61649]: DEBUG oslo_concurrency.lockutils [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquired lock "refresh_cache-0a50d7c8-4079-431b-bffa-b9e95b3a4cef" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2542.922213] env[61649]: DEBUG nova.network.neutron [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Building network info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 2542.922576] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 46dbe8bf62054db0807b39058c47f2d7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2542.931560] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 46dbe8bf62054db0807b39058c47f2d7 [ 2542.961577] env[61649]: DEBUG nova.network.neutron [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Instance cache missing network info. 
{{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 2543.253749] env[61649]: DEBUG nova.network.neutron [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Updating instance_info_cache with network_info: [{"id": "f5c77cd5-4a8d-48c1-8dfe-4596a73a4519", "address": "fa:16:3e:01:26:f9", "network": {"id": "e70549f5-8c32-456f-9488-fdfff63b2dc8", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2020544300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3939f446f6f04aa08a0b91101e55572b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b29df12-5674-476d-a9e5-5e20f704d224", "external-id": "nsx-vlan-transportzone-754", "segmentation_id": 754, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5c77cd5-4a", "ovs_interfaceid": "f5c77cd5-4a8d-48c1-8dfe-4596a73a4519", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2543.254285] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 0df8e65f521044a693ae619a4e619a9b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2543.266224] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0df8e65f521044a693ae619a4e619a9b [ 2543.266834] env[61649]: DEBUG oslo_concurrency.lockutils [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Releasing lock "refresh_cache-0a50d7c8-4079-431b-bffa-b9e95b3a4cef" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2543.267107] env[61649]: DEBUG nova.compute.manager [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Instance network_info: |[{"id": "f5c77cd5-4a8d-48c1-8dfe-4596a73a4519", "address": "fa:16:3e:01:26:f9", "network": {"id": "e70549f5-8c32-456f-9488-fdfff63b2dc8", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2020544300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3939f446f6f04aa08a0b91101e55572b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"8b29df12-5674-476d-a9e5-5e20f704d224", "external-id": "nsx-vlan-transportzone-754", "segmentation_id": 754, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5c77cd5-4a", "ovs_interfaceid": "f5c77cd5-4a8d-48c1-8dfe-4596a73a4519", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2543.267487] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:01:26:f9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8b29df12-5674-476d-a9e5-5e20f704d224', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f5c77cd5-4a8d-48c1-8dfe-4596a73a4519', 'vif_model': 'vmxnet3'}] {{(pid=61649) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2543.275035] env[61649]: DEBUG oslo.service.loopingcall [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2543.275537] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Creating VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2543.276128] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-39ec71f3-6e83-4fa7-90da-3dce595a6337 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2543.294899] env[61649]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2543.294899] env[61649]: value = "task-158335" [ 2543.294899] env[61649]: _type = "Task" [ 2543.294899] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2543.301942] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158335, 'name': CreateVM_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2543.806706] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158335, 'name': CreateVM_Task, 'duration_secs': 0.318921} completed successfully. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2543.806924] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Created VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2543.822981] env[61649]: DEBUG oslo_concurrency.lockutils [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2543.823150] env[61649]: DEBUG oslo_concurrency.lockutils [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2543.823501] env[61649]: DEBUG oslo_concurrency.lockutils [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2543.823769] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-deb5dde8-e7c5-4345-805e-8016ccbdad4e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2543.830478] env[61649]: DEBUG oslo_vmware.api [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Waiting for the task: (returnval){ [ 2543.830478] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52c645b8-7d4c-cdab-e9ec-f8267ac6e214" [ 2543.830478] env[61649]: _type = "Task" [ 2543.830478] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2543.840995] env[61649]: DEBUG oslo_vmware.api [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52c645b8-7d4c-cdab-e9ec-f8267ac6e214, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2544.339620] env[61649]: DEBUG oslo_concurrency.lockutils [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2544.339950] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Processing image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2544.340215] env[61649]: DEBUG oslo_concurrency.lockutils [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2544.859319] env[61649]: DEBUG nova.compute.manager [req-89fd0cb4-b8a9-4dc0-85f5-0ce7cf39aeaf req-7d0b5a89-8975-4347-a773-cbfe18312d25 service nova] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Received event network-changed-f5c77cd5-4a8d-48c1-8dfe-4596a73a4519 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 2544.859542] env[61649]: DEBUG nova.compute.manager [req-89fd0cb4-b8a9-4dc0-85f5-0ce7cf39aeaf req-7d0b5a89-8975-4347-a773-cbfe18312d25 service nova] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Refreshing instance network info cache due to event network-changed-f5c77cd5-4a8d-48c1-8dfe-4596a73a4519. 
{{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 2544.859733] env[61649]: DEBUG oslo_concurrency.lockutils [req-89fd0cb4-b8a9-4dc0-85f5-0ce7cf39aeaf req-7d0b5a89-8975-4347-a773-cbfe18312d25 service nova] Acquiring lock "refresh_cache-0a50d7c8-4079-431b-bffa-b9e95b3a4cef" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2544.859896] env[61649]: DEBUG oslo_concurrency.lockutils [req-89fd0cb4-b8a9-4dc0-85f5-0ce7cf39aeaf req-7d0b5a89-8975-4347-a773-cbfe18312d25 service nova] Acquired lock "refresh_cache-0a50d7c8-4079-431b-bffa-b9e95b3a4cef" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2544.860105] env[61649]: DEBUG nova.network.neutron [req-89fd0cb4-b8a9-4dc0-85f5-0ce7cf39aeaf req-7d0b5a89-8975-4347-a773-cbfe18312d25 service nova] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Refreshing network info cache for port f5c77cd5-4a8d-48c1-8dfe-4596a73a4519 {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 2544.860588] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-89fd0cb4-b8a9-4dc0-85f5-0ce7cf39aeaf req-7d0b5a89-8975-4347-a773-cbfe18312d25 service nova] Expecting reply to msg dad3c9b52b194dfdb6897eb2ed925289 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2544.873022] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dad3c9b52b194dfdb6897eb2ed925289 [ 2545.186382] env[61649]: DEBUG nova.network.neutron [req-89fd0cb4-b8a9-4dc0-85f5-0ce7cf39aeaf req-7d0b5a89-8975-4347-a773-cbfe18312d25 service nova] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Updated VIF entry in instance network info cache for port f5c77cd5-4a8d-48c1-8dfe-4596a73a4519. 
{{(pid=61649) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 2545.186795] env[61649]: DEBUG nova.network.neutron [req-89fd0cb4-b8a9-4dc0-85f5-0ce7cf39aeaf req-7d0b5a89-8975-4347-a773-cbfe18312d25 service nova] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Updating instance_info_cache with network_info: [{"id": "f5c77cd5-4a8d-48c1-8dfe-4596a73a4519", "address": "fa:16:3e:01:26:f9", "network": {"id": "e70549f5-8c32-456f-9488-fdfff63b2dc8", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2020544300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3939f446f6f04aa08a0b91101e55572b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b29df12-5674-476d-a9e5-5e20f704d224", "external-id": "nsx-vlan-transportzone-754", "segmentation_id": 754, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5c77cd5-4a", "ovs_interfaceid": "f5c77cd5-4a8d-48c1-8dfe-4596a73a4519", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2545.187308] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-89fd0cb4-b8a9-4dc0-85f5-0ce7cf39aeaf req-7d0b5a89-8975-4347-a773-cbfe18312d25 service nova] Expecting reply to msg 5f508b202644470c97e5745ca2d0eef4 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2545.195690] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5f508b202644470c97e5745ca2d0eef4 [ 2545.196303] env[61649]: DEBUG oslo_concurrency.lockutils [req-89fd0cb4-b8a9-4dc0-85f5-0ce7cf39aeaf req-7d0b5a89-8975-4347-a773-cbfe18312d25 service nova] Releasing lock "refresh_cache-0a50d7c8-4079-431b-bffa-b9e95b3a4cef" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2556.291682] env[61649]: WARNING oslo_vmware.rw_handles [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2556.291682] env[61649]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2556.291682] env[61649]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2556.291682] env[61649]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2556.291682] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2556.291682] env[61649]: ERROR oslo_vmware.rw_handles response.begin() [ 2556.291682] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2556.291682] env[61649]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2556.291682] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 
2556.291682] env[61649]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2556.291682] env[61649]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2556.291682] env[61649]: ERROR oslo_vmware.rw_handles [ 2556.291682] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Downloaded image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to vmware_temp/791636a4-97c2-46dd-93d2-6d94ad29c755/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2556.292499] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Caching image {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2556.292499] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Copying Virtual Disk [datastore1] vmware_temp/791636a4-97c2-46dd-93d2-6d94ad29c755/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk to [datastore1] vmware_temp/791636a4-97c2-46dd-93d2-6d94ad29c755/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk {{(pid=61649) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2556.292668] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3b6355d0-6050-4c13-8fd4-e4b9f719477c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2556.300743] env[61649]: DEBUG oslo_vmware.api [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Waiting for the task: (returnval){ [ 2556.300743] env[61649]: value = "task-158336" [ 2556.300743] env[61649]: _type = "Task" [ 2556.300743] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2556.309191] env[61649]: DEBUG oslo_vmware.api [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Task: {'id': task-158336, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2556.811271] env[61649]: DEBUG oslo_vmware.exceptions [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Fault InvalidArgument not matched. 
{{(pid=61649) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2556.811559] env[61649]: DEBUG oslo_concurrency.lockutils [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2556.812201] env[61649]: ERROR nova.compute.manager [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2556.812201] env[61649]: Faults: ['InvalidArgument'] [ 2556.812201] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Traceback (most recent call last): [ 2556.812201] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2556.812201] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] yield resources [ 2556.812201] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2556.812201] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] self.driver.spawn(context, instance, image_meta, [ 2556.812201] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2556.812201] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2556.812201] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2556.812201] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] self._fetch_image_if_missing(context, vi) [ 2556.812201] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2556.812690] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] image_cache(vi, tmp_image_ds_loc) [ 2556.812690] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2556.812690] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] vm_util.copy_virtual_disk( [ 2556.812690] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2556.812690] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] session._wait_for_task(vmdk_copy_task) [ 2556.812690] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 2556.812690] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] return self.wait_for_task(task_ref) [ 2556.812690] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2556.812690] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] return evt.wait() [ 2556.812690] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2556.812690] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] result = hub.switch() [ 2556.812690] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2556.812690] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] return self.greenlet.switch() [ 2556.813148] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2556.813148] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] self.f(*self.args, **self.kw) [ 2556.813148] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2556.813148] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] raise exceptions.translate_fault(task_info.error) [ 2556.813148] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2556.813148] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Faults: ['InvalidArgument'] [ 2556.813148] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] [ 2556.813148] env[61649]: INFO nova.compute.manager [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Terminating instance [ 2556.814044] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2556.814252] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2556.814490] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5eef3c69-ad2d-4607-a4d9-f58e032434b4 {{(pid=61649) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2556.817061] env[61649]: DEBUG nova.compute.manager [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2556.817254] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2556.817991] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afd5effe-422b-445e-a8c6-f0d75e633252 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2556.824959] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Unregistering the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2556.825225] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2f676e87-b242-442a-a43d-778260d520bd {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2556.827551] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2556.827721] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61649) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2556.828727] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b41860ec-f36c-4fd1-bf0b-d8ffbc05b935 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2556.834455] env[61649]: DEBUG oslo_vmware.api [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Waiting for the task: (returnval){ [ 2556.834455] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52016b13-ff50-f32b-03d6-f325d24a1b60" [ 2556.834455] env[61649]: _type = "Task" [ 2556.834455] env[61649]: } to complete. 
{{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2556.849669] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Preparing fetch location {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2556.849990] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Creating directory with path [datastore1] vmware_temp/e2bd99ec-a810-4dda-96b3-e812e2538116/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2556.850234] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6bbd2955-a1c7-46df-a7e8-5a5c00e42c07 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2556.870202] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Created directory with path [datastore1] vmware_temp/e2bd99ec-a810-4dda-96b3-e812e2538116/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2556.870398] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Fetch image to [datastore1] vmware_temp/e2bd99ec-a810-4dda-96b3-e812e2538116/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2556.870632] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to [datastore1] vmware_temp/e2bd99ec-a810-4dda-96b3-e812e2538116/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2556.871555] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cc920d4-2b9d-4577-8df6-35853e945768 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2556.878850] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37b3b374-415c-4584-b6c4-d33eeac0588f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2556.890250] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9610752-50ba-4192-964c-aaa2092f90ca {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2556.895076] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 
tempest-DeleteServersTestJSON-713044590-project-member] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Unregistered the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2556.895274] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Deleting contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2556.895452] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Deleting the datastore file [datastore1] 56652181-0379-4532-9b2a-e6138cbd73ad {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2556.896172] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bfcc0a2e-59dd-4ea0-ada8-bf76e61aaf1a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2556.924533] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c679fd0-f5f0-4ba2-ba8b-c5be75bbc245 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2556.927405] env[61649]: DEBUG oslo_vmware.api [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Waiting for the task: (returnval){ [ 2556.927405] env[61649]: value = "task-158338" [ 2556.927405] env[61649]: _type = "Task" [ 2556.927405] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2556.932990] env[61649]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-a73d5e60-1422-42be-ba5d-f3d8da047a26 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2556.937588] env[61649]: DEBUG oslo_vmware.api [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Task: {'id': task-158338, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2556.959377] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2557.009658] env[61649]: DEBUG oslo_vmware.rw_handles [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e2bd99ec-a810-4dda-96b3-e812e2538116/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61649) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2557.104949] env[61649]: DEBUG oslo_vmware.rw_handles [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Completed reading data from the image iterator. {{(pid=61649) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2557.105427] env[61649]: DEBUG oslo_vmware.rw_handles [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e2bd99ec-a810-4dda-96b3-e812e2538116/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2557.437119] env[61649]: DEBUG oslo_vmware.api [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Task: {'id': task-158338, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.081296} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2557.437871] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Deleted the datastore file {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2557.438204] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Deleted contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2557.438515] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2557.438814] env[61649]: INFO nova.compute.manager [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Took 0.62 seconds to destroy the instance on the hypervisor. 
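Every VMware operation in this section (CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, DeleteDatastoreFile_Task) follows the same shape: invoke the SOAP method, then poll the returned task until it reaches a terminal state. That is what produces the recurring "Waiting for the task" / "progress is 0%." / "completed successfully" records and, on failure, the translate_fault() exception visible in the tracebacks in this section. A self-contained stand-in for that loop (FakeTask and the state strings are illustrative; oslo.vmware actually reads TaskInfo via the property collector):

    import itertools
    import time

    class FakeTask:
        """Pretends to be a vCenter task that succeeds on the third poll."""
        def __init__(self):
            self._states = itertools.chain(["running", "running"],
                                           itertools.repeat("success"))
        def poll(self):
            return next(self._states)

    def wait_for_task(task, interval=0.1):
        for attempt in itertools.count():
            state = task.poll()
            if state == "success":
                return state
            if state == "error":
                # oslo_vmware.api._poll_task raises
                # exceptions.translate_fault(task_info.error) here, which is
                # how the InvalidArgument fileType fault surfaced above.
                raise RuntimeError("translated VIM fault")
            print(f"poll #{attempt}: progress is 0%.")  # cf. _poll_task records
            time.sleep(interval)

    wait_for_task(FakeTask())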
[ 2557.441127] env[61649]: DEBUG nova.compute.claims [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Aborting claim: {{(pid=61649) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 2557.441457] env[61649]: DEBUG oslo_concurrency.lockutils [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2557.441803] env[61649]: DEBUG oslo_concurrency.lockutils [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2557.443688] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg b738ba85dff04256bba9d4e90de708e6 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2557.474508] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b738ba85dff04256bba9d4e90de708e6 [ 2557.545483] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5aad874-2208-421c-85d0-67d453dc3880 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2557.552961] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef095a48-a980-410f-bfe8-26048c0d7c92 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2557.583305] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61b3a742-cb5b-4123-a60b-d471446dc8ed {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2557.590071] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4604f13b-8b6b-4308-8495-79e3628d7ec3 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2557.602736] env[61649]: DEBUG nova.compute.provider_tree [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2557.603314] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg b9134484c41d42b58d16aa884115fe71 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2557.610683] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b9134484c41d42b58d16aa884115fe71 [ 2557.611557] 
env[61649]: DEBUG nova.scheduler.client.report [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2557.613840] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 564b1258f1424596a90d5b4b66e155af in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2557.623815] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 564b1258f1424596a90d5b4b66e155af [ 2557.624470] env[61649]: DEBUG oslo_concurrency.lockutils [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.183s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2557.625008] env[61649]: ERROR nova.compute.manager [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2557.625008] env[61649]: Faults: ['InvalidArgument'] [ 2557.625008] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Traceback (most recent call last): [ 2557.625008] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2557.625008] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] self.driver.spawn(context, instance, image_meta, [ 2557.625008] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2557.625008] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2557.625008] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2557.625008] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] self._fetch_image_if_missing(context, vi) [ 2557.625008] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2557.625008] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] image_cache(vi, tmp_image_ds_loc) [ 2557.625008] env[61649]: ERROR 
nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2557.625316] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] vm_util.copy_virtual_disk( [ 2557.625316] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2557.625316] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] session._wait_for_task(vmdk_copy_task) [ 2557.625316] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2557.625316] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] return self.wait_for_task(task_ref) [ 2557.625316] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2557.625316] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] return evt.wait() [ 2557.625316] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2557.625316] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] result = hub.switch() [ 2557.625316] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2557.625316] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] return self.greenlet.switch() [ 2557.625316] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2557.625316] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] self.f(*self.args, **self.kw) [ 2557.625621] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2557.625621] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] raise exceptions.translate_fault(task_info.error) [ 2557.625621] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2557.625621] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Faults: ['InvalidArgument'] [ 2557.625621] env[61649]: ERROR nova.compute.manager [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] [ 2557.625737] env[61649]: DEBUG nova.compute.utils [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] VimFaultException {{(pid=61649) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2557.627011] env[61649]: DEBUG nova.compute.manager [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 
tempest-DeleteServersTestJSON-713044590-project-member] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Build of instance 56652181-0379-4532-9b2a-e6138cbd73ad was re-scheduled: A specified parameter was not correct: fileType [ 2557.627011] env[61649]: Faults: ['InvalidArgument'] {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2557.627423] env[61649]: DEBUG nova.compute.manager [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Unplugging VIFs for instance {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2557.627605] env[61649]: DEBUG nova.compute.manager [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2557.627777] env[61649]: DEBUG nova.compute.manager [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2557.627945] env[61649]: DEBUG nova.network.neutron [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2557.834507] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 13a2c2f699674381afa1d493df4e17e8 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2557.842109] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 13a2c2f699674381afa1d493df4e17e8 [ 2557.842667] env[61649]: DEBUG nova.network.neutron [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2557.843127] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg a7a08e505a154a53a41d1df0bd8519cf in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2557.852742] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a7a08e505a154a53a41d1df0bd8519cf [ 2557.853328] env[61649]: INFO nova.compute.manager [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Took 0.23 seconds to deallocate network for instance. 
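The "Acquiring lock ... / Lock ... acquired :: waited Ns / Lock ... "released" :: held Ns" triplets that recur through this section (for "compute_resources", the refresh_cache-* locks, and the per-instance locks) come from oslo.concurrency's lockutils wrapper, which times how long a caller waited for and then held a named lock. A minimal stdlib imitation of that bookkeeping (illustrative; the real helper also supports external file-based locks and semaphores):

    import threading
    import time
    from contextlib import contextmanager

    @contextmanager
    def logged_lock(lock, name):
        print(f'Acquiring lock "{name}"')
        t0 = time.monotonic()
        with lock:
            print(f'Lock "{name}" acquired :: waited {time.monotonic() - t0:.3f}s')
            t1 = time.monotonic()
            yield
        print(f'Lock "{name}" "released" :: held {time.monotonic() - t1:.3f}s')

    lock = threading.Lock()
    with logged_lock(lock, "compute_resources"):
        pass  # e.g. the ResourceTracker.abort_instance_claim body runs here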
[ 2557.854963] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg d1b5776f201846fcb62367a6a49cbce4 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2557.886596] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d1b5776f201846fcb62367a6a49cbce4 [ 2557.889585] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg ce624601875147eaa19376047bd87c7d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2557.921186] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ce624601875147eaa19376047bd87c7d [ 2557.941764] env[61649]: INFO nova.scheduler.client.report [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Deleted allocations for instance 56652181-0379-4532-9b2a-e6138cbd73ad [ 2557.947657] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg ce48e32a67b84a27b7c637475cb54b78 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2557.960763] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ce48e32a67b84a27b7c637475cb54b78 [ 2557.961334] env[61649]: DEBUG oslo_concurrency.lockutils [None req-f036d120-7b27-427c-b975-fb81bb316963 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "56652181-0379-4532-9b2a-e6138cbd73ad" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 408.331s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2557.961552] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "56652181-0379-4532-9b2a-e6138cbd73ad" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 215.947s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2557.961728] env[61649]: INFO nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] During sync_power_state the instance has a pending task (spawning). Skip. 
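The _sync_power_states record above shows the periodic power-state audit deliberately standing down: because the instance still has task_state "spawning", reconciling hypervisor and database power state now would race the in-flight build, so the sync skips it. Roughly (field values here are illustrative; Nova stores power states as integers and runs this per-instance under the lock shown above):

    def query_driver_power_state_and_sync(instance, driver_power_state):
        # Skip instances with an in-flight task rather than fight the
        # operation that owns them -- cf. "pending task (spawning). Skip."
        if instance["task_state"] is not None:
            print(f"During sync_power_state the instance has a pending task "
                  f"({instance['task_state']}). Skip.")
            return
        if driver_power_state != instance["power_state"]:
            instance["power_state"] = driver_power_state  # reconcile with the DB

    query_driver_power_state_and_sync(
        {"uuid": "56652181-0379-4532-9b2a-e6138cbd73ad",
         "task_state": "spawning", "power_state": "nostate"},
        driver_power_state="running")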
[ 2557.961892] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "56652181-0379-4532-9b2a-e6138cbd73ad" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2557.962105] env[61649]: DEBUG oslo_concurrency.lockutils [None req-10677e17-bdd7-44e9-8905-ba207d6313a5 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "56652181-0379-4532-9b2a-e6138cbd73ad" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 211.977s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2557.962309] env[61649]: DEBUG oslo_concurrency.lockutils [None req-10677e17-bdd7-44e9-8905-ba207d6313a5 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquiring lock "56652181-0379-4532-9b2a-e6138cbd73ad-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2557.962502] env[61649]: DEBUG oslo_concurrency.lockutils [None req-10677e17-bdd7-44e9-8905-ba207d6313a5 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "56652181-0379-4532-9b2a-e6138cbd73ad-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2557.962658] env[61649]: DEBUG oslo_concurrency.lockutils [None req-10677e17-bdd7-44e9-8905-ba207d6313a5 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "56652181-0379-4532-9b2a-e6138cbd73ad-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2557.964492] env[61649]: INFO nova.compute.manager [None req-10677e17-bdd7-44e9-8905-ba207d6313a5 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Terminating instance
[ 2557.966123] env[61649]: DEBUG nova.compute.manager [None req-10677e17-bdd7-44e9-8905-ba207d6313a5 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 2557.966312] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-10677e17-bdd7-44e9-8905-ba207d6313a5 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 2557.966775] env[61649]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a9ea7ed3-5a1a-48a9-bf40-dbab5d2968bb {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2557.976144] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3618d2d8-c519-41d5-8d87-135ab9bcf35e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2558.000382] env[61649]: WARNING nova.virt.vmwareapi.vmops [None req-10677e17-bdd7-44e9-8905-ba207d6313a5 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 56652181-0379-4532-9b2a-e6138cbd73ad could not be found.
[ 2558.000566] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-10677e17-bdd7-44e9-8905-ba207d6313a5 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 2558.000740] env[61649]: INFO nova.compute.manager [None req-10677e17-bdd7-44e9-8905-ba207d6313a5 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Took 0.03 seconds to destroy the instance on the hypervisor.
[ 2558.001015] env[61649]: DEBUG oslo.service.loopingcall [None req-10677e17-bdd7-44e9-8905-ba207d6313a5 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 2558.001229] env[61649]: DEBUG nova.compute.manager [-] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 2558.001320] env[61649]: DEBUG nova.network.neutron [-] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}}
[ 2558.017788] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg e2715f0dab584ef98a31367a25a4734a in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2558.022984] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e2715f0dab584ef98a31367a25a4734a
[ 2558.023327] env[61649]: DEBUG nova.network.neutron [-] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2558.023688] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg cc23ffa849ec4db599846ee4add1cdf2 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2558.030518] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cc23ffa849ec4db599846ee4add1cdf2
[ 2558.031026] env[61649]: INFO nova.compute.manager [-] [instance: 56652181-0379-4532-9b2a-e6138cbd73ad] Took 0.03 seconds to deallocate network for instance.
[ 2558.034395] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-10677e17-bdd7-44e9-8905-ba207d6313a5 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 3dddf36f4c614e6e8210432a3cd9c247 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2558.058270] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3dddf36f4c614e6e8210432a3cd9c247
[ 2558.071890] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-10677e17-bdd7-44e9-8905-ba207d6313a5 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg b6121766c69d47a091aad1f47702155a in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2558.106087] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b6121766c69d47a091aad1f47702155a
[ 2558.109905] env[61649]: DEBUG oslo_concurrency.lockutils [None req-10677e17-bdd7-44e9-8905-ba207d6313a5 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "56652181-0379-4532-9b2a-e6138cbd73ad" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.148s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2558.110431] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-10677e17-bdd7-44e9-8905-ba207d6313a5 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 62c6ef3cae004776a4fc6163671885c2 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2558.120383] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 62c6ef3cae004776a4fc6163671885c2
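The "Waiting for function ..._deallocate_network_with_retries to return." line above appears to come from oslo.service's retry helper, which re-invokes a wrapped callable on the listed exceptions with increasing sleeps. A minimal sketch of that helper follows, assuming RetryDecorator is the mechanism in play; the exception type, retry counts, and sleep times are illustrative placeholders.

```python
# Sketch of the oslo.service retry pattern implied by the loopingcall line above.
from oslo_service import loopingcall

class TransientNeutronError(Exception):
    """Hypothetical retryable failure from the network teardown call."""

@loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=2,
                            max_sleep_time=30,
                            exceptions=(TransientNeutronError,))
def deallocate_network_with_retries():
    # ... ask Neutron to tear down the instance's ports; raise
    # TransientNeutronError on a retryable failure so the decorator
    # sleeps and calls this function again ...
    return True

# The decorated call blocks until the function returns or retries run out.
deallocate_network_with_retries()
```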
[ 2570.996066] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 6897fe8339084f2c8ab0e17deecff5a7 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2571.006672] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6897fe8339084f2c8ab0e17deecff5a7
[ 2585.929092] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2585.929352] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2585.929494] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61649) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}}
[ 2588.929479] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2593.924644] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2594.929993] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2595.930004] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2595.930302] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Starting heal instance info cache {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}}
[ 2595.930342] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Rebuilding the list of instances to heal {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}}
[ 2595.931008] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg df788caa40764271bae9ae9d2d89babb in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2595.943655] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg df788caa40764271bae9ae9d2d89babb
[ 2595.945137] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}}
[ 2595.945306] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}}
[ 2595.945442] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}}
[ 2595.945567] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}}
[ 2595.945689] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Didn't find any instances for network info cache update. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}}
[ 2596.928604] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2597.929857] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2597.930228] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2597.930541] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg d2d115d0c0384298947312914d3d5956 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2597.939358] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d2d115d0c0384298947312914d3d5956
[ 2597.940377] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2597.940648] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2597.940740] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2597.940897] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61649) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 2597.941985] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4af48b4-3231-42bc-aabb-7a5f41fdd267 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2597.950672] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1cde634-8dde-4592-9edb-6bb36898d934 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2597.964293] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eba5b5d5-d64d-4e96-b3a1-33816dca6a12 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2597.970450] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b11b0b4-8d31-473d-b79b-1a7c1b939553 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2597.999997] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181829MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61649) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 2598.000189] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2598.000450] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2598.001246] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg b1d14051f0c74c9bb59417050755371e in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2598.022549] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b1d14051f0c74c9bb59417050755371e
[ 2598.023878] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 4eb5e15ff82647edb2069f9701de0f0a in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2598.032448] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4eb5e15ff82647edb2069f9701de0f0a
[ 2598.047689] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 09dcd3bd-1baa-4276-b8c5-64de3de036f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2598.047846] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance ed66fa83-b203-4c7a-b1e5-d00547fa46c9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2598.047977] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 574eac86-ac01-40f0-9e89-aa1fff108353 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2598.048128] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 0a50d7c8-4079-431b-bffa-b9e95b3a4cef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2598.048303] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 2598.048442] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1024MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 2598.108989] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1131e1f-f971-408b-bc5a-7427dfe9ea25 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2598.116160] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-885e6b70-600c-4274-b21c-79d3edaa3a7c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2598.145587] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb2160f1-c510-410f-9d67-c2e5861b6386 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2598.152257] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad97b9c8-c121-41fa-b499-bf25790cf01b {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2598.164728] env[61649]: DEBUG nova.compute.provider_tree [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
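The audit above can be reproduced arithmetically: four instances, each with a placement allocation of {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}, plus the 512 MB reserved host RAM reported in the inventory data below, yield exactly the "Final resource view" figures. A small check, as a sketch:

```python
# Reproduce the resource tracker's "Final resource view" numbers from the
# per-instance allocations logged above.
instances = 4
claim = {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}
reserved_ram_mb = 512  # 'reserved' for MEMORY_MB in the inventory record below

used_ram_mb = reserved_ram_mb + instances * claim['MEMORY_MB']
used_disk_gb = instances * claim['DISK_GB']
used_vcpus = instances * claim['VCPU']

assert used_ram_mb == 1024   # matches used_ram=1024MB
assert used_disk_gb == 4     # matches used_disk=4GB
assert used_vcpus == 4       # matches used_vcpus=4, against total_vcpus=48
```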
[ 2598.165170] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 514191cdcd7b4578adbe8d4fe1fb99d6 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2598.171994] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 514191cdcd7b4578adbe8d4fe1fb99d6
[ 2598.172847] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 2598.174892] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 691d986491814e519065b9393648cdd8 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2598.185203] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 691d986491814e519065b9393648cdd8
[ 2598.185811] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61649) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 2598.185983] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.186s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2605.180443] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2605.181085] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg d637db808c2f4bee8634bf7b78a3badb in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2605.192854] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d637db808c2f4bee8634bf7b78a3badb
[ 2606.439899] env[61649]: WARNING oslo_vmware.rw_handles [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 2606.439899] env[61649]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 2606.439899] env[61649]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 2606.439899] env[61649]: ERROR oslo_vmware.rw_handles self._conn.getresponse()
[ 2606.439899] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 2606.439899] env[61649]: ERROR oslo_vmware.rw_handles response.begin()
[ 2606.439899] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 2606.439899] env[61649]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status()
[ 2606.439899] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 2606.439899] env[61649]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without"
[ 2606.439899] env[61649]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 2606.439899] env[61649]: ERROR oslo_vmware.rw_handles
[ 2606.440581] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Downloaded image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to vmware_temp/e2bd99ec-a810-4dda-96b3-e812e2538116/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 2606.442305] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Caching image {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 2606.442571] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Copying Virtual Disk [datastore1] vmware_temp/e2bd99ec-a810-4dda-96b3-e812e2538116/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk to [datastore1] vmware_temp/e2bd99ec-a810-4dda-96b3-e812e2538116/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk {{(pid=61649) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 2606.442830] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1b1ced71-1915-49ea-9812-b1ddd8e49b7c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2606.450680] env[61649]: DEBUG oslo_vmware.api [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Waiting for the task: (returnval){
[ 2606.450680] env[61649]: value = "task-158339"
[ 2606.450680] env[61649]: _type = "Task"
[ 2606.450680] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2606.459568] env[61649]: DEBUG oslo_vmware.api [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Task: {'id': task-158339, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2606.960968] env[61649]: DEBUG oslo_vmware.exceptions [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Fault InvalidArgument not matched. {{(pid=61649) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
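The sequence above (Invoking VirtualDiskManager.CopyVirtualDisk_Task, then "Waiting for the task ... to complete", then "progress is 0%.") is oslo.vmware's invoke-then-poll task pattern. A minimal sketch follows; it assumes `session` is an established oslo_vmware.api.VMwareAPISession, and the managed-object and datacenter arguments, plus the exact CopyVirtualDisk_Task parameter set, are illustrative rather than copied from Nova's vm_util.

```python
# Sketch of the oslo.vmware task pattern visible in the log above.
def copy_disk(session, disk_mgr, dc_ref, src_path, dst_path):
    # invoke_api issues the SOAP call ("Invoking VirtualDiskManager.
    # CopyVirtualDisk_Task ..." in the log) and returns a Task moref.
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName=src_path, sourceDatacenter=dc_ref,
        destName=dst_path, destDatacenter=dc_ref)
    # wait_for_task polls task.info (the "progress is 0%." lines) and,
    # if info.state goes to 'error', raises a translated fault. When no
    # specific class matches, as with InvalidArgument here, a generic
    # VimFaultException is raised ("Fault InvalidArgument not matched.").
    return session.wait_for_task(task)
```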
[ 2606.961281] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2606.961847] env[61649]: ERROR nova.compute.manager [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2606.961847] env[61649]: Faults: ['InvalidArgument']
[ 2606.961847] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Traceback (most recent call last):
[ 2606.961847] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources
[ 2606.961847] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] yield resources
[ 2606.961847] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 2606.961847] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] self.driver.spawn(context, instance, image_meta,
[ 2606.961847] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 2606.961847] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 2606.961847] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 2606.961847] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] self._fetch_image_if_missing(context, vi)
[ 2606.961847] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 2606.962225] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] image_cache(vi, tmp_image_ds_loc)
[ 2606.962225] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 2606.962225] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] vm_util.copy_virtual_disk(
[ 2606.962225] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 2606.962225] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] session._wait_for_task(vmdk_copy_task)
[ 2606.962225] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 2606.962225] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] return self.wait_for_task(task_ref)
[ 2606.962225] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 2606.962225] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] return evt.wait()
[ 2606.962225] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 2606.962225] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] result = hub.switch()
[ 2606.962225] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 2606.962225] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] return self.greenlet.switch()
[ 2606.962637] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 2606.962637] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] self.f(*self.args, **self.kw)
[ 2606.962637] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 2606.962637] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] raise exceptions.translate_fault(task_info.error)
[ 2606.962637] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2606.962637] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Faults: ['InvalidArgument']
[ 2606.962637] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2]
[ 2606.962637] env[61649]: INFO nova.compute.manager [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Terminating instance
[ 2606.963731] env[61649]: DEBUG oslo_concurrency.lockutils [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2606.963940] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2606.964204] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0974adcf-7eb1-42b0-825e-4d931556600a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2606.966443] env[61649]: DEBUG nova.compute.manager [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 2606.966638] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 2606.967331] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d68f8f4-78d7-4106-b92f-365338b959d9 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2606.974076] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Unregistering the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 2606.974376] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ffb441e4-5131-4661-a211-3a0e61d12c41 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2606.976388] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2606.976566] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61649) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 2606.977466] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03a18877-dc13-427b-bda4-63de3f9a0b82 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2606.981974] env[61649]: DEBUG oslo_vmware.api [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Waiting for the task: (returnval){
[ 2606.981974] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]522e5dbd-c658-1894-ccbc-452f821fc3c7"
[ 2606.981974] env[61649]: _type = "Task"
[ 2606.981974] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2606.989207] env[61649]: DEBUG oslo_vmware.api [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]522e5dbd-c658-1894-ccbc-452f821fc3c7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2607.036461] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Unregistered the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 2607.036839] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Deleting contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 2607.037090] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Deleting the datastore file [datastore1] 09dcd3bd-1baa-4276-b8c5-64de3de036f2 {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 2607.037385] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1dfc6a36-01f8-4eef-8e2c-0da0b5af7654 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2607.042958] env[61649]: DEBUG oslo_vmware.api [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Waiting for the task: (returnval){
[ 2607.042958] env[61649]: value = "task-158341"
[ 2607.042958] env[61649]: _type = "Task"
[ 2607.042958] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2607.050270] env[61649]: DEBUG oslo_vmware.api [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Task: {'id': task-158341, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2607.492221] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Preparing fetch location {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 2607.492555] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Creating directory with path [datastore1] vmware_temp/1b9fc8d6-8de1-416f-b8bc-33afa7cc0262/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2607.492694] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-77559508-45b1-4c6a-890a-88a7961e8e89 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2607.503303] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Created directory with path [datastore1] vmware_temp/1b9fc8d6-8de1-416f-b8bc-33afa7cc0262/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2607.503452] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Fetch image to [datastore1] vmware_temp/1b9fc8d6-8de1-416f-b8bc-33afa7cc0262/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 2607.503622] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to [datastore1] vmware_temp/1b9fc8d6-8de1-416f-b8bc-33afa7cc0262/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 2607.504311] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42bb1049-6541-4aa4-964d-4b517e73328a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2607.510449] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b63fe73e-2cd3-4705-990c-d153de52a80a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2607.518799] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dab63357-3cb8-48ff-9d5d-7e1f4db9d5f0 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2607.550180] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9b4629e-6935-440f-887d-8aefb05e702f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2607.557759] env[61649]: DEBUG oslo_vmware.api [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Task: {'id': task-158341, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.080657} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2607.559161] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Deleted the datastore file {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 2607.559347] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Deleted contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}}
[ 2607.559521] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 2607.559697] env[61649]: INFO nova.compute.manager [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Took 0.59 seconds to destroy the instance on the hypervisor.
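What propagated out of the failed spawn above is a VimFaultException whose fault_list names the VMODL fault ('InvalidArgument'). A minimal sketch of how a caller can key off that attribute follows; the handler body is illustrative, not Nova's actual rescheduling code.

```python
# Sketch: inspecting the fault list of an oslo.vmware exception like the
# one raised in the "Instance failed to spawn" traceback above.
from oslo_vmware import exceptions as vexc

def classify(exc):
    if isinstance(exc, vexc.VimFaultException) and \
            'InvalidArgument' in exc.fault_list:
        # "A specified parameter was not correct: fileType": treat the
        # build as retryable so the instance can be re-scheduled.
        return 'reschedule'
    return 'fail'
```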
[ 2607.561469] env[61649]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-3eafaf22-71e7-4703-bd2e-6ca30ded0cc0 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2607.563273] env[61649]: DEBUG nova.compute.claims [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Aborting claim: {{(pid=61649) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 2607.563441] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2607.563650] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2607.565467] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg c2c946bf8ed04103a4a697a4de2a5c00 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2607.584192] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2607.597375] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c2c946bf8ed04103a4a697a4de2a5c00 [ 2607.636173] env[61649]: DEBUG oslo_vmware.rw_handles [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1b9fc8d6-8de1-416f-b8bc-33afa7cc0262/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2607.695868] env[61649]: DEBUG oslo_vmware.rw_handles [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Completed reading data from the image iterator. 
{{(pid=61649) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2607.696109] env[61649]: DEBUG oslo_vmware.rw_handles [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1b9fc8d6-8de1-416f-b8bc-33afa7cc0262/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2607.705768] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b193eb5f-bf1e-44d5-8b92-bdf5216a45d2 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2607.713775] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-add6daa7-451f-4434-a227-d5aa1ea82843 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2607.742358] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4462874f-0574-4643-baad-3e1ed20038ec {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2607.748922] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d0d4c0b-23f0-4d7e-8673-2fc3670aeb35 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2607.761608] env[61649]: DEBUG nova.compute.provider_tree [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2607.762049] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 39d1b3a34fc8440abccfdb76528d1604 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2607.769606] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 39d1b3a34fc8440abccfdb76528d1604 [ 2607.770498] env[61649]: DEBUG nova.scheduler.client.report [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2607.772655] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting 
reply to msg 09a15ab665464abc9bccdc841ae4192f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2607.782676] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 09a15ab665464abc9bccdc841ae4192f [ 2607.783379] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.220s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2607.783901] env[61649]: ERROR nova.compute.manager [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2607.783901] env[61649]: Faults: ['InvalidArgument'] [ 2607.783901] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Traceback (most recent call last): [ 2607.783901] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2607.783901] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] self.driver.spawn(context, instance, image_meta, [ 2607.783901] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2607.783901] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2607.783901] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2607.783901] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] self._fetch_image_if_missing(context, vi) [ 2607.783901] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2607.783901] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] image_cache(vi, tmp_image_ds_loc) [ 2607.783901] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2607.784310] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] vm_util.copy_virtual_disk( [ 2607.784310] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2607.784310] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] session._wait_for_task(vmdk_copy_task) [ 2607.784310] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2607.784310] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] return self.wait_for_task(task_ref) [ 
2607.784310] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2607.784310] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] return evt.wait() [ 2607.784310] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2607.784310] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] result = hub.switch() [ 2607.784310] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2607.784310] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] return self.greenlet.switch() [ 2607.784310] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2607.784310] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] self.f(*self.args, **self.kw) [ 2607.784618] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2607.784618] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] raise exceptions.translate_fault(task_info.error) [ 2607.784618] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2607.784618] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Faults: ['InvalidArgument'] [ 2607.784618] env[61649]: ERROR nova.compute.manager [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] [ 2607.784618] env[61649]: DEBUG nova.compute.utils [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] VimFaultException {{(pid=61649) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2607.785941] env[61649]: DEBUG nova.compute.manager [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Build of instance 09dcd3bd-1baa-4276-b8c5-64de3de036f2 was re-scheduled: A specified parameter was not correct: fileType [ 2607.785941] env[61649]: Faults: ['InvalidArgument'] {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2607.786353] env[61649]: DEBUG nova.compute.manager [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Unplugging VIFs for instance {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2607.786532] env[61649]: DEBUG nova.compute.manager [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 
tempest-ServerDiskConfigTestJSON-783193802-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2607.786701] env[61649]: DEBUG nova.compute.manager [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2607.786866] env[61649]: DEBUG nova.network.neutron [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2608.016997] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 042189c6accc4f1697d12e7d1083ea0a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2608.024353] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 042189c6accc4f1697d12e7d1083ea0a [ 2608.025346] env[61649]: DEBUG nova.network.neutron [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2608.025820] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg aada0e3b7c274d0a955254fe9c50b36f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2608.037394] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aada0e3b7c274d0a955254fe9c50b36f [ 2608.038112] env[61649]: INFO nova.compute.manager [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Took 0.25 seconds to deallocate network for instance.
[ 2608.039823] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 5c801992bbc8433a8da5cc87c6d12c51 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2608.072279] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5c801992bbc8433a8da5cc87c6d12c51 [ 2608.074845] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 406fe0c179d1451f918f0a356514c751 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2608.109104] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 406fe0c179d1451f918f0a356514c751 [ 2608.125795] env[61649]: INFO nova.scheduler.client.report [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Deleted allocations for instance 09dcd3bd-1baa-4276-b8c5-64de3de036f2 [ 2608.131894] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 3728bde600ad4138b4872c1dd6a34bd8 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2608.145375] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3728bde600ad4138b4872c1dd6a34bd8 [ 2608.145923] env[61649]: DEBUG oslo_concurrency.lockutils [None req-c0a6fd43-3bee-4461-a2df-db038a8e6393 tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Lock "09dcd3bd-1baa-4276-b8c5-64de3de036f2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 401.411s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2608.146138] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "09dcd3bd-1baa-4276-b8c5-64de3de036f2" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 266.132s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2608.146335] env[61649]: INFO nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] During sync_power_state the instance has a pending task (spawning). Skip.
[ 2608.146477] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "09dcd3bd-1baa-4276-b8c5-64de3de036f2" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2608.146689] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9bc47fe7-6583-4b5b-a100-f6926d45039c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Lock "09dcd3bd-1baa-4276-b8c5-64de3de036f2" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 205.549s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2608.146898] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9bc47fe7-6583-4b5b-a100-f6926d45039c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Acquiring lock "09dcd3bd-1baa-4276-b8c5-64de3de036f2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2608.147091] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9bc47fe7-6583-4b5b-a100-f6926d45039c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Lock "09dcd3bd-1baa-4276-b8c5-64de3de036f2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2608.147246] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9bc47fe7-6583-4b5b-a100-f6926d45039c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Lock "09dcd3bd-1baa-4276-b8c5-64de3de036f2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2608.149199] env[61649]: INFO nova.compute.manager [None req-9bc47fe7-6583-4b5b-a100-f6926d45039c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Terminating instance [ 2608.150819] env[61649]: DEBUG nova.compute.manager [None req-9bc47fe7-6583-4b5b-a100-f6926d45039c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Start destroying the instance on the hypervisor.
{{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2608.151049] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc47fe7-6583-4b5b-a100-f6926d45039c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2608.151511] env[61649]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1d51fa41-fe0d-485e-913e-51f28164c4f1 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2608.160587] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d17fde87-95fe-4df0-b089-4290ebf3f2bd {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2608.184918] env[61649]: WARNING nova.virt.vmwareapi.vmops [None req-9bc47fe7-6583-4b5b-a100-f6926d45039c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 09dcd3bd-1baa-4276-b8c5-64de3de036f2 could not be found. [ 2608.185128] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc47fe7-6583-4b5b-a100-f6926d45039c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2608.185310] env[61649]: INFO nova.compute.manager [None req-9bc47fe7-6583-4b5b-a100-f6926d45039c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Took 0.03 seconds to destroy the instance on the hypervisor. [ 2608.185555] env[61649]: DEBUG oslo.service.loopingcall [None req-9bc47fe7-6583-4b5b-a100-f6926d45039c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2608.185776] env[61649]: DEBUG nova.compute.manager [-] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2608.185873] env[61649]: DEBUG nova.network.neutron [-] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2608.213431] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 7ad1a81709d0464a974ccf6d15aec1bc in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2608.225328] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7ad1a81709d0464a974ccf6d15aec1bc [ 2608.225748] env[61649]: DEBUG nova.network.neutron [-] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2608.226160] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 78a2cf93cfad45d991e0f5f1a4c7f423 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2608.234158] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 78a2cf93cfad45d991e0f5f1a4c7f423 [ 2608.234158] env[61649]: INFO nova.compute.manager [-] [instance: 09dcd3bd-1baa-4276-b8c5-64de3de036f2] Took 0.05 seconds to deallocate network for instance. [ 2608.237448] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9bc47fe7-6583-4b5b-a100-f6926d45039c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 85754ee381e049a691db9679f2197fd3 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2608.262351] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 85754ee381e049a691db9679f2197fd3 [ 2608.276364] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9bc47fe7-6583-4b5b-a100-f6926d45039c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg effdb92deddd4d08b4ba89e4a7664464 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2608.313377] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg effdb92deddd4d08b4ba89e4a7664464 [ 2608.315953] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9bc47fe7-6583-4b5b-a100-f6926d45039c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Lock "09dcd3bd-1baa-4276-b8c5-64de3de036f2" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.169s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2608.316344] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9bc47fe7-6583-4b5b-a100-f6926d45039c tempest-ServerDiskConfigTestJSON-783193802 tempest-ServerDiskConfigTestJSON-783193802-project-member] Expecting reply to msg 5b567c0859d84209ad9c6eb0b8a5f628 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2608.325679] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5b567c0859d84209ad9c6eb0b8a5f628 [ 2646.281707] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-86990167-d0d9-4eff-952d-122337cc5f5e tempest-ServersTestJSON-1529792186
tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 142be96bccc648a7aed3e324f218b373 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2646.290720] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 142be96bccc648a7aed3e324f218b373 [ 2646.291190] env[61649]: DEBUG oslo_concurrency.lockutils [None req-86990167-d0d9-4eff-952d-122337cc5f5e tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquiring lock "574eac86-ac01-40f0-9e89-aa1fff108353" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2647.929294] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2647.929681] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2647.929681] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61649) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 2649.929728] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2655.924115] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2656.324257] env[61649]: WARNING oslo_vmware.rw_handles [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2656.324257] env[61649]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2656.324257] env[61649]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2656.324257] env[61649]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2656.324257] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2656.324257] env[61649]: ERROR oslo_vmware.rw_handles response.begin() [ 2656.324257] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2656.324257] env[61649]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2656.324257] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2656.324257] env[61649]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2656.324257] env[61649]: ERROR
oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2656.324257] env[61649]: ERROR oslo_vmware.rw_handles [ 2656.324657] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Downloaded image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to vmware_temp/1b9fc8d6-8de1-416f-b8bc-33afa7cc0262/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2656.326914] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Caching image {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2656.327153] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Copying Virtual Disk [datastore1] vmware_temp/1b9fc8d6-8de1-416f-b8bc-33afa7cc0262/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk to [datastore1] vmware_temp/1b9fc8d6-8de1-416f-b8bc-33afa7cc0262/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk {{(pid=61649) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2656.327447] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-12c70b9a-3cf4-464b-be0f-a9fa29e55081 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2656.338861] env[61649]: DEBUG oslo_vmware.api [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Waiting for the task: (returnval){ [ 2656.338861] env[61649]: value = "task-158342" [ 2656.338861] env[61649]: _type = "Task" [ 2656.338861] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2656.347823] env[61649]: DEBUG oslo_vmware.api [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Task: {'id': task-158342, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2656.848886] env[61649]: DEBUG oslo_vmware.exceptions [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Fault InvalidArgument not matched. 
{{(pid=61649) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2656.849170] env[61649]: DEBUG oslo_concurrency.lockutils [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2656.849731] env[61649]: ERROR nova.compute.manager [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2656.849731] env[61649]: Faults: ['InvalidArgument'] [ 2656.849731] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Traceback (most recent call last): [ 2656.849731] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2656.849731] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] yield resources [ 2656.849731] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2656.849731] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] self.driver.spawn(context, instance, image_meta, [ 2656.849731] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2656.849731] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2656.849731] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2656.849731] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] self._fetch_image_if_missing(context, vi) [ 2656.849731] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2656.850086] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] image_cache(vi, tmp_image_ds_loc) [ 2656.850086] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2656.850086] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] vm_util.copy_virtual_disk( [ 2656.850086] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2656.850086] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] session._wait_for_task(vmdk_copy_task) [ 2656.850086] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2656.850086] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] return self.wait_for_task(task_ref) [ 2656.850086] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2656.850086] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] return evt.wait() [ 2656.850086] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2656.850086] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] result = hub.switch() [ 2656.850086] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2656.850086] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] return self.greenlet.switch() [ 2656.850515] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2656.850515] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] self.f(*self.args, **self.kw) [ 2656.850515] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2656.850515] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] raise exceptions.translate_fault(task_info.error) [ 2656.850515] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2656.850515] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Faults: ['InvalidArgument'] [ 2656.850515] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] [ 2656.850515] env[61649]: INFO nova.compute.manager [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Terminating instance [ 2656.854087] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2656.854087] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2656.854087] env[61649]: DEBUG nova.compute.manager [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 
tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2656.854087] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2656.854087] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9e46e0f9-c27c-45a2-b6b0-30be81c44bb2 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2656.856139] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a31353c-4946-4d95-bf28-34254b8ebabc {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2656.862475] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Unregistering the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2656.862856] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bd7b52f2-51ce-4a22-a77f-00f82b8844d3 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2656.864999] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2656.865170] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61649) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2656.866076] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9463c0e5-c1ed-4169-806d-aa34b5ba72e4 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2656.870723] env[61649]: DEBUG oslo_vmware.api [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Waiting for the task: (returnval){ [ 2656.870723] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]526c8865-3fe5-537e-5cfd-49421d276ae4" [ 2656.870723] env[61649]: _type = "Task" [ 2656.870723] env[61649]: } to complete. 
{{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2656.877223] env[61649]: DEBUG oslo_vmware.api [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]526c8865-3fe5-537e-5cfd-49421d276ae4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2656.928942] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2656.931506] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Unregistered the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2656.931697] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Deleting contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2656.931875] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Deleting the datastore file [datastore1] ed66fa83-b203-4c7a-b1e5-d00547fa46c9 {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2656.932140] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-94e93385-0e3a-474f-b1f6-8f62b90023c4 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2656.938293] env[61649]: DEBUG oslo_vmware.api [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Waiting for the task: (returnval){ [ 2656.938293] env[61649]: value = "task-158344" [ 2656.938293] env[61649]: _type = "Task" [ 2656.938293] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2656.946049] env[61649]: DEBUG oslo_vmware.api [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Task: {'id': task-158344, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2657.380424] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Preparing fetch location {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2657.380649] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Creating directory with path [datastore1] vmware_temp/91b9547b-30bf-4586-8880-c7e20f3c5198/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2657.380878] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-068c574c-24b4-4733-b0ec-98fa8605c240 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2657.391420] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Created directory with path [datastore1] vmware_temp/91b9547b-30bf-4586-8880-c7e20f3c5198/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2657.391607] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Fetch image to [datastore1] vmware_temp/91b9547b-30bf-4586-8880-c7e20f3c5198/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2657.391773] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to [datastore1] vmware_temp/91b9547b-30bf-4586-8880-c7e20f3c5198/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2657.392537] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c10fde65-d309-4ea3-a0d7-88039769a663 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2657.398498] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-248c9c40-3575-42b2-bdc6-7006134fbb2b {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2657.406935] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf2f85be-226b-4537-b869-a61e9902cc5e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2657.436405] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95a6e070-34f0-492f-93fe-bbb644a96d10 {{(pid=61649) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2657.446897] env[61649]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5d0da6bd-e243-48b4-a456-62709dc44fc4 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2657.448529] env[61649]: DEBUG oslo_vmware.api [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Task: {'id': task-158344, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.06692} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2657.448767] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Deleted the datastore file {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2657.448956] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Deleted contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2657.449157] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2657.449336] env[61649]: INFO nova.compute.manager [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2657.451471] env[61649]: DEBUG nova.compute.claims [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Aborting claim: {{(pid=61649) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 2657.451668] env[61649]: DEBUG oslo_concurrency.lockutils [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2657.451904] env[61649]: DEBUG oslo_concurrency.lockutils [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2657.453861] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Expecting reply to msg dad0921624484d988fe34c09add41dc7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2657.468493] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2657.490993] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dad0921624484d988fe34c09add41dc7 [ 2657.514865] env[61649]: DEBUG oslo_vmware.rw_handles [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/91b9547b-30bf-4586-8880-c7e20f3c5198/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2657.574721] env[61649]: DEBUG oslo_vmware.rw_handles [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Completed reading data from the image iterator. {{(pid=61649) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2657.575089] env[61649]: DEBUG oslo_vmware.rw_handles [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/91b9547b-30bf-4586-8880-c7e20f3c5198/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61649) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2657.594639] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2894eef5-9da9-49d0-89fc-7a768e452af7 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2657.601716] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df291022-f656-450e-8225-dfeac236980d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2657.630748] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37d90d46-2868-4eac-8931-20952e8ccbb7 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2657.637284] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d467c4a-a838-46ce-999e-a401dc42bc1a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2657.649837] env[61649]: DEBUG nova.compute.provider_tree [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2657.650497] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Expecting reply to msg 763e2999c6fa463ca2c634606698c4b7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2657.658138] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 763e2999c6fa463ca2c634606698c4b7 [ 2657.659219] env[61649]: DEBUG nova.scheduler.client.report [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2657.661655] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Expecting reply to msg c12bb9d6e6214ffdb46d883be59e3ce9 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2657.672064] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c12bb9d6e6214ffdb46d883be59e3ce9 [ 2657.672863] env[61649]: DEBUG oslo_concurrency.lockutils [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.221s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2657.673518] env[61649]: ERROR nova.compute.manager [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2657.673518] env[61649]: Faults: ['InvalidArgument'] [ 2657.673518] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Traceback (most recent call last): [ 2657.673518] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2657.673518] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] self.driver.spawn(context, instance, image_meta, [ 2657.673518] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2657.673518] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2657.673518] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2657.673518] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] self._fetch_image_if_missing(context, vi) [ 2657.673518] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2657.673518] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] image_cache(vi, tmp_image_ds_loc) [ 2657.673518] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2657.673864] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] vm_util.copy_virtual_disk( [ 2657.673864] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2657.673864] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] session._wait_for_task(vmdk_copy_task) [ 2657.673864] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2657.673864] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] return self.wait_for_task(task_ref) [ 2657.673864] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2657.673864] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] return evt.wait() [ 2657.673864] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2657.673864] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] result = hub.switch() [ 2657.673864] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2657.673864] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] return self.greenlet.switch() [ 2657.673864] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2657.673864] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] self.f(*self.args, **self.kw) [ 2657.674204] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2657.674204] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] raise exceptions.translate_fault(task_info.error) [ 2657.674204] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2657.674204] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Faults: ['InvalidArgument'] [ 2657.674204] env[61649]: ERROR nova.compute.manager [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] [ 2657.674781] env[61649]: DEBUG nova.compute.utils [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] VimFaultException {{(pid=61649) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2657.676165] env[61649]: DEBUG nova.compute.manager [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Build of instance ed66fa83-b203-4c7a-b1e5-d00547fa46c9 was re-scheduled: A specified parameter was not correct: fileType [ 2657.676165] env[61649]: Faults: ['InvalidArgument'] {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2657.676555] env[61649]: DEBUG nova.compute.manager [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Unplugging VIFs for instance {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2657.676751] env[61649]: DEBUG nova.compute.manager [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2657.676979] env[61649]: DEBUG nova.compute.manager [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2657.677155] env[61649]: DEBUG nova.network.neutron [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2657.891770] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Expecting reply to msg 9c75ff5f3514411ba73707d447e4fc91 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2657.901755] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9c75ff5f3514411ba73707d447e4fc91 [ 2657.902347] env[61649]: DEBUG nova.network.neutron [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2657.903659] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Expecting reply to msg 41418aeca6474f3b99a6969dd0ce4023 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2657.911932] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 41418aeca6474f3b99a6969dd0ce4023 [ 2657.912564] env[61649]: INFO nova.compute.manager [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Took 0.24 seconds to deallocate network for instance. 
[ 2657.914467] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Expecting reply to msg d2d8732231de4d5b8821fa1bfc8c2892 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2657.928696] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2657.928857] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Starting heal instance info cache {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 2657.928976] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Rebuilding the list of instances to heal {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 2657.929503] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 7eba964eaea24877823e99b819e57d77 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2657.940386] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7eba964eaea24877823e99b819e57d77 [ 2657.941456] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2657.941597] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2657.941723] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Didn't find any instances for network info cache update. 
{{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 2657.942248] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2657.947157] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d2d8732231de4d5b8821fa1bfc8c2892 [ 2657.949940] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Expecting reply to msg f994030bdd04410b8df4dd006f4ff16f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2657.978581] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f994030bdd04410b8df4dd006f4ff16f [ 2657.997175] env[61649]: INFO nova.scheduler.client.report [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Deleted allocations for instance ed66fa83-b203-4c7a-b1e5-d00547fa46c9 [ 2658.003355] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Expecting reply to msg 688f06838fb9419697b54db143a97ff7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2658.014246] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 688f06838fb9419697b54db143a97ff7 [ 2658.014803] env[61649]: DEBUG oslo_concurrency.lockutils [None req-395acc02-53f4-4e3a-9876-c4f9a4890730 tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Lock "ed66fa83-b203-4c7a-b1e5-d00547fa46c9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 359.886s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2658.015141] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "ed66fa83-b203-4c7a-b1e5-d00547fa46c9" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 316.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2658.015382] env[61649]: INFO nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] During sync_power_state the instance has a pending task (spawning). Skip. 
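The lockutils lines above show the pattern behind the long waits: per-instance named locks serialize the build, the power-state sync, and the terminate, and every acquire/release is logged with how long the caller waited and how long the lock was held (here 359.886s held by the build, 316.000s waited by the power-state sync). A simplified stand-in using plain threading; the lock registry and the log format below are illustrative, not oslo.concurrency's actual implementation:

```python
# Simplified sketch of named-lock bookkeeping with waited/held timings.
import threading
import time
from collections import defaultdict
from contextlib import contextmanager

_locks = defaultdict(threading.Lock)  # one lock per name, created on demand


@contextmanager
def named_lock(name, owner):
    lock = _locks[name]
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
    t1 = time.monotonic()
    try:
        yield
    finally:
        held = time.monotonic() - t1
        lock.release()
        print(f'Lock "{name}" released by "{owner}" :: held {held:.3f}s')


if __name__ == "__main__":
    with named_lock("ed66fa83-b203-4c7a-b1e5-d00547fa46c9",
                    "do_terminate_instance"):
        time.sleep(0.01)  # stand-in for the actual terminate work
```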
[ 2658.015566] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "ed66fa83-b203-4c7a-b1e5-d00547fa46c9" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2658.015794] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d7e25885-e3c0-4257-8a82-3d301c5a1bbb tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Lock "ed66fa83-b203-4c7a-b1e5-d00547fa46c9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 164.094s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2658.016018] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d7e25885-e3c0-4257-8a82-3d301c5a1bbb tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Acquiring lock "ed66fa83-b203-4c7a-b1e5-d00547fa46c9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2658.016221] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d7e25885-e3c0-4257-8a82-3d301c5a1bbb tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Lock "ed66fa83-b203-4c7a-b1e5-d00547fa46c9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2658.016380] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d7e25885-e3c0-4257-8a82-3d301c5a1bbb tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Lock "ed66fa83-b203-4c7a-b1e5-d00547fa46c9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2658.018271] env[61649]: INFO nova.compute.manager [None req-d7e25885-e3c0-4257-8a82-3d301c5a1bbb tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Terminating instance [ 2658.019996] env[61649]: DEBUG nova.compute.manager [None req-d7e25885-e3c0-4257-8a82-3d301c5a1bbb tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Start destroying the instance on the hypervisor. 
{{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2658.020250] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-d7e25885-e3c0-4257-8a82-3d301c5a1bbb tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2658.020774] env[61649]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-756d47e5-68f4-4b85-b402-4fe6ca819992 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2658.032200] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edb8308e-8fec-427b-91b9-fb07f9e72ed3 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2658.056907] env[61649]: WARNING nova.virt.vmwareapi.vmops [None req-d7e25885-e3c0-4257-8a82-3d301c5a1bbb tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ed66fa83-b203-4c7a-b1e5-d00547fa46c9 could not be found. [ 2658.057125] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-d7e25885-e3c0-4257-8a82-3d301c5a1bbb tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2658.057309] env[61649]: INFO nova.compute.manager [None req-d7e25885-e3c0-4257-8a82-3d301c5a1bbb tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2658.057559] env[61649]: DEBUG oslo.service.loopingcall [None req-d7e25885-e3c0-4257-8a82-3d301c5a1bbb tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2658.057781] env[61649]: DEBUG nova.compute.manager [-] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2658.057884] env[61649]: DEBUG nova.network.neutron [-] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2658.074764] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg f33a68f9128e4ed7871927e477164a8e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2658.080262] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f33a68f9128e4ed7871927e477164a8e [ 2658.080621] env[61649]: DEBUG nova.network.neutron [-] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2658.080991] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 90945bc15b974e928d06a844fa8a3d97 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2658.087746] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 90945bc15b974e928d06a844fa8a3d97 [ 2658.088213] env[61649]: INFO nova.compute.manager [-] [instance: ed66fa83-b203-4c7a-b1e5-d00547fa46c9] Took 0.03 seconds to deallocate network for instance. [ 2658.091588] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d7e25885-e3c0-4257-8a82-3d301c5a1bbb tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Expecting reply to msg 9874c7be02b94c86935477305e32419f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2658.118649] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9874c7be02b94c86935477305e32419f [ 2658.134080] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d7e25885-e3c0-4257-8a82-3d301c5a1bbb tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Expecting reply to msg 549d0e0c23fd43559021cc6b513d7eb5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2658.165841] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 549d0e0c23fd43559021cc6b513d7eb5 [ 2658.168643] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d7e25885-e3c0-4257-8a82-3d301c5a1bbb tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Lock "ed66fa83-b203-4c7a-b1e5-d00547fa46c9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.153s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2658.168960] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d7e25885-e3c0-4257-8a82-3d301c5a1bbb tempest-AttachVolumeShelveTestJSON-1373181479 tempest-AttachVolumeShelveTestJSON-1373181479-project-member] Expecting reply to msg b59b556008224558b8f191ebc2084568 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2658.180272] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b59b556008224558b8f191ebc2084568 [ 2658.929610] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running 
periodic task ComputeManager._instance_usage_audit {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2659.929285] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2659.929285] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 4b20646803ab42a2be5f47e6763fbd23 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2659.938358] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4b20646803ab42a2be5f47e6763fbd23 [ 2659.939778] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2659.940170] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2659.940243] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2659.940390] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61649) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2659.944045] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e253183a-a5a0-4922-b16b-17816ad95d49 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2659.949646] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b7f3c0d-cc40-45e6-b96a-34bd68d80230 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2659.963317] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5201be71-ace9-4dfa-b7c8-f751699e4115 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2659.969470] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d34e0212-7190-472c-a95f-410bbd116dbb {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2659.998096] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181736MB free_disk=197GB free_vcpus=48 
pci_devices=None {{(pid=61649) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2659.998249] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2659.998447] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2659.999262] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg aa6af0a8db2d4171a18e3eedce107d29 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2660.013250] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aa6af0a8db2d4171a18e3eedce107d29 [ 2660.014773] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg d1ba7442364041f0b3244be87baae127 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2660.022887] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d1ba7442364041f0b3244be87baae127 [ 2660.039247] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 574eac86-ac01-40f0-9e89-aa1fff108353 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2660.039400] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 0a50d7c8-4079-431b-bffa-b9e95b3a4cef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2660.039583] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2660.039722] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=768MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2660.078536] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-190b322b-ca52-49f9-b100-bb0e9da00534 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2660.085596] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a62623a4-18e3-459d-a9b9-0646ceb233cd {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2660.115818] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93a32d77-a0f4-4a4d-970e-05de58dd9e70 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2660.122626] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6d747ab-433a-46a6-b9de-1c62945502a8 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2660.135217] env[61649]: DEBUG nova.compute.provider_tree [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2660.135762] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 60096b2563c742c9b739f45bd98d0684 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2660.142610] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 60096b2563c742c9b739f45bd98d0684 [ 2660.143480] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2660.145649] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg de69b2d0bab441d09ba8d5baa00f9755 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2660.156184] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg de69b2d0bab441d09ba8d5baa00f9755 [ 
2660.156767] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61649) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2660.157260] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.158s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2660.157260] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2660.157260] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Cleaning up deleted instances with incomplete migration {{(pid=61649) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11255}} [ 2660.157537] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 92e89e9a0f374e158f18ff48cc204cd1 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2660.165126] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 92e89e9a0f374e158f18ff48cc204cd1 [ 2663.930787] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2663.931210] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 2fa227ee552e4d989334bf60eaa96138 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2663.938040] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2fa227ee552e4d989334bf60eaa96138 [ 2668.937531] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2668.938023] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Cleaning up deleted instances {{(pid=61649) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11217}} [ 2668.938343] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 5788f9b705c24ca6a7a8c1157c878428 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2668.946991] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5788f9b705c24ca6a7a8c1157c878428 [ 2668.947610] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] There are 0 instances to clean {{(pid=61649) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11226}} [ 2690.998664] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 01b97d79fcb849449f65e17c2f1c4bc3 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 
2691.007790] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 01b97d79fcb849449f65e17c2f1c4bc3 [ 2706.344825] env[61649]: WARNING oslo_vmware.rw_handles [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2706.344825] env[61649]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2706.344825] env[61649]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2706.344825] env[61649]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2706.344825] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2706.344825] env[61649]: ERROR oslo_vmware.rw_handles response.begin() [ 2706.344825] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2706.344825] env[61649]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2706.344825] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2706.344825] env[61649]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2706.344825] env[61649]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2706.344825] env[61649]: ERROR oslo_vmware.rw_handles [ 2706.345538] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Downloaded image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to vmware_temp/91b9547b-30bf-4586-8880-c7e20f3c5198/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2706.347568] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Caching image {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2706.347825] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Copying Virtual Disk [datastore1] vmware_temp/91b9547b-30bf-4586-8880-c7e20f3c5198/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk to [datastore1] vmware_temp/91b9547b-30bf-4586-8880-c7e20f3c5198/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk {{(pid=61649) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2706.348124] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b07f4be5-fd9e-4ebf-8944-33e069fe7be0 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2706.357549] env[61649]: DEBUG oslo_vmware.api [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Waiting for the task: (returnval){ [ 
2706.357549] env[61649]: value = "task-158345" [ 2706.357549] env[61649]: _type = "Task" [ 2706.357549] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2706.365870] env[61649]: DEBUG oslo_vmware.api [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Task: {'id': task-158345, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2706.867766] env[61649]: DEBUG oslo_vmware.exceptions [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Fault InvalidArgument not matched. {{(pid=61649) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2706.868087] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2706.868642] env[61649]: ERROR nova.compute.manager [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2706.868642] env[61649]: Faults: ['InvalidArgument'] [ 2706.868642] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Traceback (most recent call last): [ 2706.868642] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2706.868642] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] yield resources [ 2706.868642] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2706.868642] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] self.driver.spawn(context, instance, image_meta, [ 2706.868642] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2706.868642] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2706.868642] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2706.868642] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] self._fetch_image_if_missing(context, vi) [ 2706.868642] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2706.869037] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] image_cache(vi, tmp_image_ds_loc) [ 2706.869037] 
env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2706.869037] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] vm_util.copy_virtual_disk( [ 2706.869037] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2706.869037] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] session._wait_for_task(vmdk_copy_task) [ 2706.869037] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2706.869037] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] return self.wait_for_task(task_ref) [ 2706.869037] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2706.869037] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] return evt.wait() [ 2706.869037] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2706.869037] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] result = hub.switch() [ 2706.869037] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2706.869037] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] return self.greenlet.switch() [ 2706.869445] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2706.869445] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] self.f(*self.args, **self.kw) [ 2706.869445] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2706.869445] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] raise exceptions.translate_fault(task_info.error) [ 2706.869445] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2706.869445] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Faults: ['InvalidArgument'] [ 2706.869445] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] [ 2706.869445] env[61649]: INFO nova.compute.manager [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Terminating instance [ 2706.871176] env[61649]: DEBUG oslo_concurrency.lockutils [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquired lock 
"[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2706.871176] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2706.871176] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-25357c52-1991-4839-bb21-00fb2d611c4f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2706.873360] env[61649]: DEBUG nova.compute.manager [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2706.873550] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2706.874259] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f0e835d-37be-4dc9-bfd3-ebbb7c0b91b4 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2706.881167] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Unregistering the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2706.881398] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-94dade9f-43cf-43df-9f05-77789312b843 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2706.883563] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2706.883735] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61649) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2706.884692] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98c10391-6a61-452b-b61d-046bc5614872 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2706.889340] env[61649]: DEBUG oslo_vmware.api [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Waiting for the task: (returnval){ [ 2706.889340] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]5208cc2a-d87e-16dc-6ced-090c8ec5f517" [ 2706.889340] env[61649]: _type = "Task" [ 2706.889340] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2706.896204] env[61649]: DEBUG oslo_vmware.api [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]5208cc2a-d87e-16dc-6ced-090c8ec5f517, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2706.946276] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Unregistered the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2706.946599] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Deleting contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2706.946790] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Deleting the datastore file [datastore1] 574eac86-ac01-40f0-9e89-aa1fff108353 {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2706.947046] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-63c84104-8306-4b4c-9a0a-9a95eb16a7dd {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2706.952932] env[61649]: DEBUG oslo_vmware.api [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Waiting for the task: (returnval){ [ 2706.952932] env[61649]: value = "task-158347" [ 2706.952932] env[61649]: _type = "Task" [ 2706.952932] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2706.960375] env[61649]: DEBUG oslo_vmware.api [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Task: {'id': task-158347, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2707.399731] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Preparing fetch location {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2707.400170] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Creating directory with path [datastore1] vmware_temp/24d0bfc5-a854-4e3e-890a-fab1f4941e15/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2707.400250] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-060c7e56-79ed-45ed-b5bb-66e02f6fc933 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2707.410861] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Created directory with path [datastore1] vmware_temp/24d0bfc5-a854-4e3e-890a-fab1f4941e15/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2707.411073] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Fetch image to [datastore1] vmware_temp/24d0bfc5-a854-4e3e-890a-fab1f4941e15/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2707.411261] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to [datastore1] vmware_temp/24d0bfc5-a854-4e3e-890a-fab1f4941e15/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2707.411926] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07c0ce48-3b78-4e64-8102-3abe0941c3f4 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2707.418262] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ce57b62-5aba-48d9-a089-0098b7d50942 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2707.426885] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebd84cec-32f7-49f1-9097-c32ff677a172 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2707.467168] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da1b2ddd-79d6-40e7-9ca0-415e53b20764 {{(pid=61649) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2707.474257] env[61649]: DEBUG oslo_vmware.api [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Task: {'id': task-158347, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.073071} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2707.475648] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Deleted the datastore file {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2707.475827] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Deleted contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2707.476019] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2707.476203] env[61649]: INFO nova.compute.manager [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Took 0.60 seconds to destroy the instance on the hypervisor. 
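The destroy path above unregisters the VM, deletes its datastore directory via FileManager.DeleteDatastoreFile_Task, and waits on the returned task; earlier in this section the same path shrugged off an instance that was already gone ("Instance does not exist on backend"). A rough sketch of that sequence against a hypothetical backend object (none of these method names are the real driver API; the cf. comments point at the vSphere calls visible in the log):

```python
# Sketch of the destroy sequence with tolerance for already-missing VMs.
class InstanceNotFound(Exception):
    pass


class FakeTask:
    """Stand-in for a vCenter task handle such as task-158347 above."""
    def wait(self):
        print("DeleteDatastoreFile_Task completed")


class FakeBackend:
    """Hypothetical stand-in for the vSphere session."""
    def __init__(self, known):
        self.known = set(known)

    def find_vm_by_uuid(self, uuid):        # cf. SearchIndex.FindAllByUuid
        if uuid not in self.known:
            raise InstanceNotFound(uuid)
        return f"vm-ref-{uuid[:8]}"

    def unregister_vm(self, vm_ref):        # cf. VirtualMachine.UnregisterVM
        print(f"Unregistered {vm_ref}")

    def delete_datastore_dir(self, uuid):   # cf. FileManager.DeleteDatastoreFile_Task
        return FakeTask()


def destroy_instance(backend, uuid):
    try:
        vm_ref = backend.find_vm_by_uuid(uuid)
    except InstanceNotFound:
        # Already gone on the hypervisor: log it and carry on, as Nova does.
        print(f"Instance {uuid} does not exist on backend")
        return
    backend.unregister_vm(vm_ref)
    backend.delete_datastore_dir(uuid).wait()


backend = FakeBackend({"574eac86-ac01-40f0-9e89-aa1fff108353"})
destroy_instance(backend, "574eac86-ac01-40f0-9e89-aa1fff108353")  # full path
destroy_instance(backend, "ed66fa83-b203-4c7a-b1e5-d00547fa46c9")  # not found
```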
[ 2707.478192] env[61649]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-709fae3b-4fe6-409f-ab2c-a96775c0decd {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2707.479879] env[61649]: DEBUG nova.compute.claims [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Aborting claim: {{(pid=61649) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 2707.480245] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2707.480615] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2707.482808] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg e629ff4b843140a7ad5b4323f94ab8b7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2707.502294] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2707.517351] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e629ff4b843140a7ad5b4323f94ab8b7 [ 2707.559435] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3187f579-ac06-45a1-a302-e24637eb4c98 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2707.563181] env[61649]: DEBUG oslo_vmware.rw_handles [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/24d0bfc5-a854-4e3e-890a-fab1f4941e15/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61649) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2707.620808] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e47ccc8-565d-4bc4-8fed-12b7d17bbcc2 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2707.626652] env[61649]: DEBUG oslo_vmware.rw_handles [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Completed reading data from the image iterator. {{(pid=61649) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2707.626818] env[61649]: DEBUG oslo_vmware.rw_handles [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/24d0bfc5-a854-4e3e-890a-fab1f4941e15/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2707.652411] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ed80f08-0aa6-4034-9114-ac5a653b2671 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2707.659426] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cd90c5d-b99d-4f79-afa2-9dc69d8209cd {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2707.672264] env[61649]: DEBUG nova.compute.provider_tree [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2707.672762] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg e72fc1cd996a4db5859e0f4658c3a08d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2707.680204] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e72fc1cd996a4db5859e0f4658c3a08d [ 2707.681130] env[61649]: DEBUG nova.scheduler.client.report [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2707.683279] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 
[ 2707.683279] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 3d667a098c5f4eb28ba5c9a62dc7df82 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2707.694564] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3d667a098c5f4eb28ba5c9a62dc7df82
[ 2707.695259] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.215s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2707.695792] env[61649]: ERROR nova.compute.manager [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2707.695792] env[61649]: Faults: ['InvalidArgument']
[ 2707.695792] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Traceback (most recent call last):
[ 2707.695792] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353]   File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 2707.695792] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353]     self.driver.spawn(context, instance, image_meta,
[ 2707.695792] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 2707.695792] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 2707.695792] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 2707.695792] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353]     self._fetch_image_if_missing(context, vi)
[ 2707.695792] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 2707.695792] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353]     image_cache(vi, tmp_image_ds_loc)
[ 2707.695792] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 2707.696157] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353]     vm_util.copy_virtual_disk(
[ 2707.696157] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353]   File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 2707.696157] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353]     session._wait_for_task(vmdk_copy_task)
[ 2707.696157] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353]   File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 2707.696157] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353]     return self.wait_for_task(task_ref)
[ 2707.696157] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 2707.696157] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353]     return evt.wait()
[ 2707.696157] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 2707.696157] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353]     result = hub.switch()
[ 2707.696157] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 2707.696157] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353]     return self.greenlet.switch()
[ 2707.696157] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 2707.696157] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353]     self.f(*self.args, **self.kw)
[ 2707.696532] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 2707.696532] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353]     raise exceptions.translate_fault(task_info.error)
[ 2707.696532] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2707.696532] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Faults: ['InvalidArgument']
[ 2707.696532] env[61649]: ERROR nova.compute.manager [instance: 574eac86-ac01-40f0-9e89-aa1fff108353]
[ 2707.696532] env[61649]: DEBUG nova.compute.utils [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] VimFaultException {{(pid=61649) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 2707.697863] env[61649]: DEBUG nova.compute.manager [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Build of instance 574eac86-ac01-40f0-9e89-aa1fff108353 was re-scheduled: A specified parameter was not correct: fileType
[ 2707.697863] env[61649]: Faults: ['InvalidArgument'] {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 2707.698246] env[61649]: DEBUG nova.compute.manager [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Unplugging VIFs for instance {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 2707.698423] env[61649]: DEBUG nova.compute.manager [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}}
[ 2707.698593] env[61649]: DEBUG nova.compute.manager [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 2707.698757] env[61649]: DEBUG nova.network.neutron [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}}
[ 2707.940177] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg ac07eb31344d469fab8c7126ce1665ab in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2707.947869] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ac07eb31344d469fab8c7126ce1665ab
[ 2707.948674] env[61649]: DEBUG nova.network.neutron [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2707.949319] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg de63524af8fc4ef590544402bcf2630d in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2707.958055] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg de63524af8fc4ef590544402bcf2630d
[ 2707.959076] env[61649]: INFO nova.compute.manager [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Took 0.26 seconds to deallocate network for instance.
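The traceback above is the generic oslo.vmware failure path: nova blocks on a vCenter task, oslo.vmware polls the task until it leaves the running state, and an error state is translated into a VimFaultException carrying the VIM fault list (here ['InvalidArgument'] for the bad fileType). What follows is a minimal, self-contained sketch of that polling pattern; poll_task and FakeTaskInfo are illustrative stand-ins, not oslo.vmware's real API.

```python
# Sketch of the poll-until-done pattern visible in the traceback; the real
# implementation lives in oslo_vmware/api.py (_poll_task / wait_for_task).
import time

class VimFaultException(Exception):
    def __init__(self, fault_list, message):
        super().__init__(message)
        self.fault_list = fault_list  # e.g. ['InvalidArgument']

class FakeTaskInfo:
    def __init__(self, state, error_msg=None, faults=()):
        self.state = state            # 'running' | 'success' | 'error'
        self.error_msg = error_msg
        self.faults = list(faults)

def poll_task(read_task_info, interval=0.5):
    """Block until the task finishes; raise on error, like _poll_task."""
    while True:
        info = read_task_info()
        if info.state == 'success':
            return info
        if info.state == 'error':
            # the VIM fault is translated into a Python exception
            raise VimFaultException(info.faults, info.error_msg)
        time.sleep(interval)

# Reproduces the failure mode seen in the log above.
states = iter([FakeTaskInfo('running'),
               FakeTaskInfo('error',
                            'A specified parameter was not correct: fileType',
                            ['InvalidArgument'])])
try:
    poll_task(lambda: next(states), interval=0)
except VimFaultException as exc:
    print(exc, exc.fault_list)
```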
[ 2707.961521] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 262a175beaca40389af1004952185193 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2707.995231] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 262a175beaca40389af1004952185193
[ 2707.997936] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 80ddc61b65ae4d9789c583df1cdd371f in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2708.026998] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 80ddc61b65ae4d9789c583df1cdd371f
[ 2708.045420] env[61649]: INFO nova.scheduler.client.report [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Deleted allocations for instance 574eac86-ac01-40f0-9e89-aa1fff108353
[ 2708.051535] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 143909e5e9774b68b2f0978f73917f25 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2708.061361] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 143909e5e9774b68b2f0978f73917f25
[ 2708.061868] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4a7d4c90-7a2e-4eea-99bc-a25ee46555aa tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Lock "574eac86-ac01-40f0-9e89-aa1fff108353" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 256.809s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2708.062106] env[61649]: DEBUG oslo_concurrency.lockutils [None req-86990167-d0d9-4eff-952d-122337cc5f5e tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Lock "574eac86-ac01-40f0-9e89-aa1fff108353" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 61.771s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2708.062350] env[61649]: DEBUG oslo_concurrency.lockutils [None req-86990167-d0d9-4eff-952d-122337cc5f5e tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquiring lock "574eac86-ac01-40f0-9e89-aa1fff108353-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2708.062565] env[61649]: DEBUG oslo_concurrency.lockutils [None req-86990167-d0d9-4eff-952d-122337cc5f5e tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Lock "574eac86-ac01-40f0-9e89-aa1fff108353-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2708.062734] env[61649]: DEBUG oslo_concurrency.lockutils [None req-86990167-d0d9-4eff-952d-122337cc5f5e tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Lock "574eac86-ac01-40f0-9e89-aa1fff108353-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2708.064591] env[61649]: INFO nova.compute.manager [None req-86990167-d0d9-4eff-952d-122337cc5f5e tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Terminating instance
[ 2708.066213] env[61649]: DEBUG nova.compute.manager [None req-86990167-d0d9-4eff-952d-122337cc5f5e tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 2708.066403] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-86990167-d0d9-4eff-952d-122337cc5f5e tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 2708.066860] env[61649]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dba1792a-1776-4ce2-a321-e5de9e087236 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2708.076536] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b65df10-8b9d-4f91-8d0d-4645500e38aa {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2708.099871] env[61649]: WARNING nova.virt.vmwareapi.vmops [None req-86990167-d0d9-4eff-952d-122337cc5f5e tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 574eac86-ac01-40f0-9e89-aa1fff108353 could not be found.
[ 2708.100129] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-86990167-d0d9-4eff-952d-122337cc5f5e tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 2708.100323] env[61649]: INFO nova.compute.manager [None req-86990167-d0d9-4eff-952d-122337cc5f5e tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Took 0.03 seconds to destroy the instance on the hypervisor.
[ 2708.100565] env[61649]: DEBUG oslo.service.loopingcall [None req-86990167-d0d9-4eff-952d-122337cc5f5e tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
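The "waited"/"held" figures in the lockutils lines above come from timing the acquire and release of a named lock; the build lock on this instance was held for 256.809s while the terminate request waited 61.771s for it. A minimal stdlib re-creation of that pattern (illustrative only, not oslo.concurrency's actual code):

```python
# Times how long a caller waits to acquire a named lock and how long it is
# held, printing in the same shape as the lockutils log lines above.
import threading
import time
from contextlib import contextmanager

_locks = {}  # name -> threading.Lock

@contextmanager
def timed_lock(name, owner):
    lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - t1
        print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')

with timed_lock("compute_resources", "instance_claim"):
    time.sleep(0.01)  # the critical section
```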
[ 2708.100778] env[61649]: DEBUG nova.compute.manager [-] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 2708.100871] env[61649]: DEBUG nova.network.neutron [-] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}}
[ 2708.120187] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 586204f3b3a04a26a2c3799960309797 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2708.126765] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 586204f3b3a04a26a2c3799960309797
[ 2708.127126] env[61649]: DEBUG nova.network.neutron [-] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2708.127582] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg ea115fde608c4553b7c60e813d8daff4 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2708.135009] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ea115fde608c4553b7c60e813d8daff4
[ 2708.135471] env[61649]: INFO nova.compute.manager [-] [instance: 574eac86-ac01-40f0-9e89-aa1fff108353] Took 0.03 seconds to deallocate network for instance.
[ 2708.138920] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-86990167-d0d9-4eff-952d-122337cc5f5e tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg c44d9460e78245d5a1ecedb298ab8356 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2708.164843] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c44d9460e78245d5a1ecedb298ab8356
[ 2708.180827] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-86990167-d0d9-4eff-952d-122337cc5f5e tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg fc049f9e932046c39a192dd9dcc33597 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2708.218150] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fc049f9e932046c39a192dd9dcc33597
[ 2708.220973] env[61649]: DEBUG oslo_concurrency.lockutils [None req-86990167-d0d9-4eff-952d-122337cc5f5e tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Lock "574eac86-ac01-40f0-9e89-aa1fff108353" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.159s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2708.221325] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-86990167-d0d9-4eff-952d-122337cc5f5e tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 6612cb378e334abf99c78ac566e62af7 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2708.229048] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6612cb378e334abf99c78ac566e62af7
[ 2708.939291] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2708.939551] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2708.939605] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61649) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}}
[ 2709.051376] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquiring lock "9c1ab5e2-d95e-46b3-9f98-835606a81b57" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2709.051599] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Lock "9c1ab5e2-d95e-46b3-9f98-835606a81b57" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2709.052083] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg dee27f0627154259aded19de23a8902e in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2709.062099] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dee27f0627154259aded19de23a8902e
[ 2709.062490] env[61649]: DEBUG nova.compute.manager [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
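The periodic-task entries above follow a simple pattern: each task runs on a timer, and _reclaim_queued_deletes bails out immediately because the operator has not enabled reclaim (CONF.reclaim_instance_interval <= 0). A small stand-in for that runner, with made-up config values for the demo:

```python
# Sketch of a periodic-task loop with a config gate, mirroring the
# "Running periodic task ..." / "skipping..." lines above.
import time

CONF = {"reclaim_instance_interval": 0}  # 0 or negative disables reclaim

def reclaim_queued_deletes():
    if CONF["reclaim_instance_interval"] <= 0:
        print("CONF.reclaim_instance_interval <= 0, skipping...")
        return
    print("reclaiming soft-deleted instances...")

def run_periodic_tasks(tasks, ticks=2, spacing=0.01):
    """Very small stand-in for oslo.service's periodic task runner."""
    for _ in range(ticks):
        for task in tasks:
            print(f"Running periodic task {task.__name__}")
            task()
        time.sleep(spacing)

run_periodic_tasks([reclaim_queued_deletes])
```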
[ 2709.064178] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg d927ef0f11b940d88d8ea451d5e2bf23 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2709.093567] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d927ef0f11b940d88d8ea451d5e2bf23
[ 2709.108032] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2709.108271] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2709.109591] env[61649]: INFO nova.compute.claims [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 2709.111115] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg b3b3e75ebe5c49139ff5fd184fd73b31 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2709.140875] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b3b3e75ebe5c49139ff5fd184fd73b31
[ 2709.142391] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg b92bb0ec5d2541eeacc7a6809fdab8b9 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2709.166986] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b92bb0ec5d2541eeacc7a6809fdab8b9
[ 2709.204461] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc0a9a59-5d24-4e62-b087-ce795d97ae25 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2709.213039] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2c2792f-87d8-43de-864a-c3c7e3918fb0 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2709.242099] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0330b98e-f613-41ca-b8af-80a7e797267b {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2709.249102] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8342f064-98f8-4271-bb38-c57ed901c899 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2709.262076] env[61649]: DEBUG nova.compute.provider_tree [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2709.262542] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg d1c3762ac3594393a913755cd179f7d6 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2709.269674] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d1c3762ac3594393a913755cd179f7d6
[ 2709.270532] env[61649]: DEBUG nova.scheduler.client.report [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 2709.272715] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 885733b9c4fd4267a6bc661a096449ac in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2709.282270] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 885733b9c4fd4267a6bc661a096449ac
[ 2709.282889] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.175s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2709.283340] env[61649]: DEBUG nova.compute.manager [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] Start building networks asynchronously for instance. {{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}}
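The inventory dict logged above is what bounds scheduling on this node: the schedulable capacity per resource class is (total - reserved) * allocation_ratio, consumed in multiples of step_size with at most max_unit per allocation. A quick check against the logged numbers:

```python
# Recomputes schedulable capacity from the placement inventory shown in the
# report lines above (values copied from the log).
inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0,
             "min_unit": 1, "max_unit": 16, "step_size": 1},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0,
                  "min_unit": 1, "max_unit": 65530, "step_size": 1},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0,
                "min_unit": 1, "max_unit": 197, "step_size": 1},
}

for rc, inv in inventory.items():
    usable = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: schedulable capacity {usable:g}, "
          f"at most {inv['max_unit']} per allocation")
# VCPU: 192 (48 physical cores oversubscribed 4x), MEMORY_MB: 196078,
# DISK_GB: 400.
```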
[ 2709.284926] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 0c674c95a7244c19b7ddd2632d9ca5cb in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2709.312919] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0c674c95a7244c19b7ddd2632d9ca5cb
[ 2709.313956] env[61649]: DEBUG nova.compute.utils [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Using /dev/sd instead of None {{(pid=61649) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 2709.314537] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg bdaba3a019cb49d6917e95fe25827e9b in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2709.315258] env[61649]: DEBUG nova.compute.manager [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] Allocating IP information in the background. {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}}
[ 2709.315448] env[61649]: DEBUG nova.network.neutron [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] allocate_for_instance() {{(pid=61649) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 2709.323569] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bdaba3a019cb49d6917e95fe25827e9b
[ 2709.324088] env[61649]: DEBUG nova.compute.manager [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] Start building block device mappings for instance. {{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}}
[ 2709.325653] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 04536a2026a84e4b9bd86c4bed58906e in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2709.351173] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 04536a2026a84e4b9bd86c4bed58906e
[ 2709.353708] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 59cc3b3aff354327bca2a3dfcbb446a7 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2709.359599] env[61649]: DEBUG nova.policy [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fc5f71ebe35b4863a38dd7606ae87937', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '72501ae7a7dd4f85801c096912a5af36', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61649) authorize /opt/stack/nova/nova/policy.py:203}}
[ 2709.380611] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 59cc3b3aff354327bca2a3dfcbb446a7
[ 2709.381648] env[61649]: DEBUG nova.compute.manager [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] Start spawning the instance on the hypervisor. {{(pid=61649) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}}
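The nova.policy line above is a routine role check: the request carries roles ['reader', 'member'], and a rule such as network:attach_external_network is normally restricted to more privileged roles, so the check fails and the build proceeds on an ordinary tenant network. A minimal role-based sketch of that kind of check; the rule definitions here are illustrative, not Nova's actual policy file:

```python
# Sketch of a role-based policy check in the spirit of nova.policy.authorize.
credentials = {"roles": ["reader", "member"], "is_admin": False}

policies = {
    # hypothetical rules for the demo
    "network:attach_external_network": {"role:admin"},
    "os_compute_api:servers:create": {"role:member", "role:admin"},
}

def authorize(action, creds):
    allowed = policies.get(action, set())
    granted = {f"role:{r}" for r in creds["roles"]}
    return bool(allowed & granted)

for action in policies:
    verdict = "passed" if authorize(action, credentials) else "failed"
    print(f"Policy check for {action} {verdict}")
```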
[ 2709.402192] env[61649]: DEBUG nova.virt.hardware [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-04-02T10:03:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-04-02T10:03:42Z,direct_url=,disk_format='vmdk',id=d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d9c1bd4c77004c3cb8e42232cad1896c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-04-02T10:03:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 2709.402420] env[61649]: DEBUG nova.virt.hardware [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Flavor limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 2709.402575] env[61649]: DEBUG nova.virt.hardware [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Image limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 2709.402752] env[61649]: DEBUG nova.virt.hardware [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Flavor pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 2709.402898] env[61649]: DEBUG nova.virt.hardware [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Image pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 2709.403044] env[61649]: DEBUG nova.virt.hardware [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 2709.403245] env[61649]: DEBUG nova.virt.hardware [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 2709.403419] env[61649]: DEBUG nova.virt.hardware [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 2709.403587] env[61649]: DEBUG nova.virt.hardware [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Got 1 possible topologies {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 2709.403749] env[61649]: DEBUG nova.virt.hardware [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 2709.403955] env[61649]: DEBUG nova.virt.hardware [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 2709.404823] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13804710-b599-463e-8ff7-5d8319c03f9b {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2709.412437] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a752ecd0-4cc9-4179-ac4d-85c2723a321d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2709.602086] env[61649]: DEBUG nova.network.neutron [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] Successfully created port: 366e3973-ce62-450e-ad52-4ec5eaa41b03 {{(pid=61649) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 2710.052976] env[61649]: DEBUG nova.compute.manager [req-bc65f471-b093-4fe1-9089-35c10dc51453 req-353fd8fb-15aa-4493-9d98-bbd8fc75d1b7 service nova] [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] Received event network-vif-plugged-366e3973-ce62-450e-ad52-4ec5eaa41b03 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}}
[ 2710.053249] env[61649]: DEBUG oslo_concurrency.lockutils [req-bc65f471-b093-4fe1-9089-35c10dc51453 req-353fd8fb-15aa-4493-9d98-bbd8fc75d1b7 service nova] Acquiring lock "9c1ab5e2-d95e-46b3-9f98-835606a81b57-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2710.053407] env[61649]: DEBUG oslo_concurrency.lockutils [req-bc65f471-b093-4fe1-9089-35c10dc51453 req-353fd8fb-15aa-4493-9d98-bbd8fc75d1b7 service nova] Lock "9c1ab5e2-d95e-46b3-9f98-835606a81b57-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2710.053572] env[61649]: DEBUG oslo_concurrency.lockutils [req-bc65f471-b093-4fe1-9089-35c10dc51453 req-353fd8fb-15aa-4493-9d98-bbd8fc75d1b7 service nova] Lock "9c1ab5e2-d95e-46b3-9f98-835606a81b57-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2710.053737] env[61649]: DEBUG nova.compute.manager [req-bc65f471-b093-4fe1-9089-35c10dc51453 req-353fd8fb-15aa-4493-9d98-bbd8fc75d1b7 service nova] [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] No waiting events found dispatching network-vif-plugged-366e3973-ce62-450e-ad52-4ec5eaa41b03 {{(pid=61649) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 2710.053900] env[61649]: WARNING nova.compute.manager [req-bc65f471-b093-4fe1-9089-35c10dc51453 req-353fd8fb-15aa-4493-9d98-bbd8fc75d1b7 service nova] [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] Received unexpected event network-vif-plugged-366e3973-ce62-450e-ad52-4ec5eaa41b03 for instance with vm_state building and task_state spawning.
[ 2710.117742] env[61649]: DEBUG nova.network.neutron [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] Successfully updated port: 366e3973-ce62-450e-ad52-4ec5eaa41b03 {{(pid=61649) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 2710.118509] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 7d5946cad3614d74b6b81e237fc7fb34 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2710.125651] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7d5946cad3614d74b6b81e237fc7fb34
[ 2710.126309] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquiring lock "refresh_cache-9c1ab5e2-d95e-46b3-9f98-835606a81b57" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2710.126544] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquired lock "refresh_cache-9c1ab5e2-d95e-46b3-9f98-835606a81b57" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2710.126809] env[61649]: DEBUG nova.network.neutron [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] Building network info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}}
[ 2710.127251] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg b8febe38983a41e3b000b792edfdf4e8 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2710.134895] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b8febe38983a41e3b000b792edfdf4e8
[ 2710.165165] env[61649]: DEBUG nova.network.neutron [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] Instance cache missing network info. {{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}}
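The nova.virt.hardware entries above enumerate candidate (sockets, cores, threads) layouts for the flavor's vCPU count and keep those within the flavor/image limits; with one vCPU the only factorization is 1:1:1. A simplified re-creation of that search (the real _get_possible_cpu_topologies applies more constraints and preference sorting):

```python
# Enumerate CPU topologies whose product equals the vCPU count, subject to
# per-axis limits, mirroring the "Possible topologies" line above.
from itertools import product

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    found = []
    for s, c, t in product(range(1, vcpus + 1), repeat=3):
        if s * c * t == vcpus and s <= max_sockets \
                and c <= max_cores and t <= max_threads:
            found.append((s, c, t))
    return found

print(possible_topologies(1))   # [(1, 1, 1)] -- as in the log
print(possible_topologies(4))   # e.g. (1, 1, 4), (1, 2, 2), (2, 2, 1), ...
```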
[ 2710.301455] env[61649]: DEBUG nova.network.neutron [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] Updating instance_info_cache with network_info: [{"id": "366e3973-ce62-450e-ad52-4ec5eaa41b03", "address": "fa:16:3e:a4:d1:c0", "network": {"id": "8c8e2f95-e820-404a-a1c5-63f49f27fa23", "bridge": "br-int", "label": "tempest-ServersTestJSON-1518117450-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72501ae7a7dd4f85801c096912a5af36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "158692b5-b9fb-49e8-9903-e742ffd6c168", "external-id": "nsx-vlan-transportzone-769", "segmentation_id": 769, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap366e3973-ce", "ovs_interfaceid": "366e3973-ce62-450e-ad52-4ec5eaa41b03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2710.301960] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg e06d7409702a47918383b40fad3d2a8b in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2710.313853] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e06d7409702a47918383b40fad3d2a8b
[ 2710.314386] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Releasing lock "refresh_cache-9c1ab5e2-d95e-46b3-9f98-835606a81b57" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2710.314661] env[61649]: DEBUG nova.compute.manager [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] Instance network_info: |[{"id": "366e3973-ce62-450e-ad52-4ec5eaa41b03", "address": "fa:16:3e:a4:d1:c0", "network": {"id": "8c8e2f95-e820-404a-a1c5-63f49f27fa23", "bridge": "br-int", "label": "tempest-ServersTestJSON-1518117450-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72501ae7a7dd4f85801c096912a5af36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "158692b5-b9fb-49e8-9903-e742ffd6c168", "external-id": "nsx-vlan-transportzone-769", "segmentation_id": 769, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap366e3973-ce", "ovs_interfaceid": "366e3973-ce62-450e-ad52-4ec5eaa41b03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}}
[ 2710.315045] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a4:d1:c0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '158692b5-b9fb-49e8-9903-e742ffd6c168', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '366e3973-ce62-450e-ad52-4ec5eaa41b03', 'vif_model': 'vmxnet3'}] {{(pid=61649) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 2710.322380] env[61649]: DEBUG oslo.service.loopingcall [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 2710.322804] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] Creating VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 2710.323024] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f803d15f-745a-401f-9df5-fffbd8ac283b {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2710.342485] env[61649]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 2710.342485] env[61649]: value = "task-158348"
[ 2710.342485] env[61649]: _type = "Task"
[ 2710.342485] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2710.349644] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158348, 'name': CreateVM_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2710.852636] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158348, 'name': CreateVM_Task, 'duration_secs': 0.313665} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2710.852822] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] Created VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 2710.853488] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2710.853685] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2710.854080] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 2710.854331] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-34dc799e-4e38-4aa9-b813-dd9ea942e907 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2710.859626] env[61649]: DEBUG oslo_vmware.api [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Waiting for the task: (returnval){
[ 2710.859626] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]5262d98f-7cd8-cb9c-a292-ce45bedd98a9"
[ 2710.859626] env[61649]: _type = "Task"
[ 2710.859626] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2710.870698] env[61649]: DEBUG oslo_vmware.api [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]5262d98f-7cd8-cb9c-a292-ce45bedd98a9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2710.929289] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2711.369555] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2711.369887] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] Processing image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 2711.370045] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2712.094140] env[61649]: DEBUG nova.compute.manager [req-82d34bd2-4a7f-4f81-812f-2df2b444cb79 req-581f0f60-e8b1-43a1-83b1-3d01703fea0b service nova] [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] Received event network-changed-366e3973-ce62-450e-ad52-4ec5eaa41b03 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}}
[ 2712.094332] env[61649]: DEBUG nova.compute.manager [req-82d34bd2-4a7f-4f81-812f-2df2b444cb79 req-581f0f60-e8b1-43a1-83b1-3d01703fea0b service nova] [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] Refreshing instance network info cache due to event network-changed-366e3973-ce62-450e-ad52-4ec5eaa41b03. {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}}
[ 2712.094542] env[61649]: DEBUG oslo_concurrency.lockutils [req-82d34bd2-4a7f-4f81-812f-2df2b444cb79 req-581f0f60-e8b1-43a1-83b1-3d01703fea0b service nova] Acquiring lock "refresh_cache-9c1ab5e2-d95e-46b3-9f98-835606a81b57" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2712.094677] env[61649]: DEBUG oslo_concurrency.lockutils [req-82d34bd2-4a7f-4f81-812f-2df2b444cb79 req-581f0f60-e8b1-43a1-83b1-3d01703fea0b service nova] Acquired lock "refresh_cache-9c1ab5e2-d95e-46b3-9f98-835606a81b57" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2712.094830] env[61649]: DEBUG nova.network.neutron [req-82d34bd2-4a7f-4f81-812f-2df2b444cb79 req-581f0f60-e8b1-43a1-83b1-3d01703fea0b service nova] [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] Refreshing network info cache for port 366e3973-ce62-450e-ad52-4ec5eaa41b03 {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}}
[ 2712.095347] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-82d34bd2-4a7f-4f81-812f-2df2b444cb79 req-581f0f60-e8b1-43a1-83b1-3d01703fea0b service nova] Expecting reply to msg 993451505aea452ca401032c6cf1e2c1 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2712.102076] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 993451505aea452ca401032c6cf1e2c1
[ 2712.311786] env[61649]: DEBUG nova.network.neutron [req-82d34bd2-4a7f-4f81-812f-2df2b444cb79 req-581f0f60-e8b1-43a1-83b1-3d01703fea0b service nova] [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] Updated VIF entry in instance network info cache for port 366e3973-ce62-450e-ad52-4ec5eaa41b03. {{(pid=61649) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}}
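The image-cache entries above serialize work per datastore path ("[datastore1] devstack-image-cache_base/<image-id>") so that only one worker fetches or converts a given image at a time while others reuse the cached copy. A minimal lock-per-key sketch of that idea; fetch_image here is a stand-in, not Nova's downloader:

```python
# One lock per cache key: concurrent requests for the same image serialize,
# requests for different images proceed in parallel.
import threading
from collections import defaultdict

_cache_locks = defaultdict(threading.Lock)
_cache = {}

def fetch_image(image_id):
    return f"vmdk-for-{image_id}"  # placeholder for the expensive fetch

def get_cached_image(datastore, image_id):
    key = f"[{datastore}] devstack-image-cache_base/{image_id}"
    with _cache_locks[key]:          # one fetcher per image, as in the log
        if key not in _cache:
            _cache[key] = fetch_image(image_id)
        return _cache[key]

print(get_cached_image("datastore1", "d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11"))
```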
[ 2712.312189] env[61649]: DEBUG nova.network.neutron [req-82d34bd2-4a7f-4f81-812f-2df2b444cb79 req-581f0f60-e8b1-43a1-83b1-3d01703fea0b service nova] [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] Updating instance_info_cache with network_info: [{"id": "366e3973-ce62-450e-ad52-4ec5eaa41b03", "address": "fa:16:3e:a4:d1:c0", "network": {"id": "8c8e2f95-e820-404a-a1c5-63f49f27fa23", "bridge": "br-int", "label": "tempest-ServersTestJSON-1518117450-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72501ae7a7dd4f85801c096912a5af36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "158692b5-b9fb-49e8-9903-e742ffd6c168", "external-id": "nsx-vlan-transportzone-769", "segmentation_id": 769, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap366e3973-ce", "ovs_interfaceid": "366e3973-ce62-450e-ad52-4ec5eaa41b03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2712.312787] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-82d34bd2-4a7f-4f81-812f-2df2b444cb79 req-581f0f60-e8b1-43a1-83b1-3d01703fea0b service nova] Expecting reply to msg 35e7be0cf57f478c934f969f3df4edc6 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2712.324036] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 35e7be0cf57f478c934f969f3df4edc6
[ 2712.324036] env[61649]: DEBUG oslo_concurrency.lockutils [req-82d34bd2-4a7f-4f81-812f-2df2b444cb79 req-581f0f60-e8b1-43a1-83b1-3d01703fea0b service nova] Releasing lock "refresh_cache-9c1ab5e2-d95e-46b3-9f98-835606a81b57" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2715.923537] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2717.928813] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2718.929712] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2719.929931] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2719.930193] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Starting heal instance info cache {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}}
[ 2719.930291] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Rebuilding the list of instances to heal {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}}
[ 2719.930854] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 4cbc4d43f6dd4453b92d65ce3d94ac31 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2719.941719] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4cbc4d43f6dd4453b92d65ce3d94ac31
[ 2719.943173] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}}
[ 2719.943261] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}}
[ 2719.943403] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Didn't find any instances for network info cache update. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}}
[ 2719.943963] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2719.944222] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2719.944606] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 64c5ed6f89c14acc8ee938cfabeb267b in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2719.953222] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 64c5ed6f89c14acc8ee938cfabeb267b
[ 2719.954108] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2719.954334] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2719.954501] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
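The resource-tracker audit that follows is simple arithmetic over the tracked instances: each of the two building instances claims 1 VCPU, 128 MB of RAM and 1 GB of disk, and 512 MB is reserved for the host, which is exactly the used_ram=768MB, used_disk=2GB, used_vcpus=2 reported in the final resource view below:

```python
# Recomputes the "Final resource view" figures from the per-instance
# placement allocations logged below (values copied from the log).
allocations = [
    {"DISK_GB": 1, "MEMORY_MB": 128, "VCPU": 1},  # instance 0a50d7c8-...
    {"DISK_GB": 1, "MEMORY_MB": 128, "VCPU": 1},  # instance 9c1ab5e2-...
]
reserved_ram_mb = 512  # host-reserved memory from the inventory record

used_ram = reserved_ram_mb + sum(a["MEMORY_MB"] for a in allocations)
used_disk = sum(a["DISK_GB"] for a in allocations)
used_vcpus = sum(a["VCPU"] for a in allocations)
print(f"used_ram={used_ram}MB used_disk={used_disk}GB used_vcpus={used_vcpus}")
# -> used_ram=768MB used_disk=2GB used_vcpus=2
```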
"nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2719.954650] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61649) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2719.955683] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7008f349-d4c9-4f10-8ead-ac85d343002f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2719.965178] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33a7e2d1-4a8e-4210-8549-6f5a62a6c589 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2719.978529] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75ac42a8-1882-4760-9f9d-bb9dcc75e992 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2719.984483] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fde73af8-02b6-4738-9c47-b5c22a6b5385 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2720.012694] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181840MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61649) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2720.012844] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2720.013015] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2720.013778] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 91c9c8178f284b83ac9c5054cca6b8af in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2720.027427] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 91c9c8178f284b83ac9c5054cca6b8af [ 2720.028905] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 93745535d8a4490e90ced26545e2888e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2720.036662] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 93745535d8a4490e90ced26545e2888e [ 2720.105048] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 
0a50d7c8-4079-431b-bffa-b9e95b3a4cef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2720.105205] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 9c1ab5e2-d95e-46b3-9f98-835606a81b57 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2720.105380] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2720.105518] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=768MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2720.120306] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Refreshing inventories for resource provider dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 2720.131950] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Updating ProviderTree inventory for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 2720.132159] env[61649]: DEBUG nova.compute.provider_tree [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Updating inventory in ProviderTree for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2720.141539] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Refreshing aggregate associations for resource provider dad32f24-3843-462d-a3f9-4ef2a60037c4, aggregates: None {{(pid=61649) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 2720.155890] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Refreshing trait associations for resource provider dad32f24-3843-462d-a3f9-4ef2a60037c4, traits: 
COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=61649) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 2720.188831] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbc330ea-5c96-475e-b2a0-c8869ec21888 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2720.195806] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eccc1cf8-dfd3-46de-84a7-e3899f73ade9 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2720.225376] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb9ee99f-4bf7-4e7c-aaa0-403454e0fd42 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2720.231774] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a905b806-e7cb-4b5d-a567-37b91f2190a6 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2720.244039] env[61649]: DEBUG nova.compute.provider_tree [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2720.244478] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 3d9c0b46f32b4412b4c2dd8785ce7b10 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2720.251598] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3d9c0b46f32b4412b4c2dd8785ce7b10 [ 2720.252501] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2720.254601] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 1c1a7deed3ca41ebad13ccc4d9c02164 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2720.265940] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1c1a7deed3ca41ebad13ccc4d9c02164 [ 2720.266549] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61649) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2720.266721] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.254s {{(pid=61649) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2727.262391] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2727.263025] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 847dd7d12a5a4aeaaeec4fa8f35ec06f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2727.273063] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 847dd7d12a5a4aeaaeec4fa8f35ec06f [ 2737.842561] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-75ca2d8a-1cf3-4178-9537-6be422c81824 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 3cbbb5976acd41df8b4912207ebb9f89 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2737.850424] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3cbbb5976acd41df8b4912207ebb9f89 [ 2737.850867] env[61649]: DEBUG oslo_concurrency.lockutils [None req-75ca2d8a-1cf3-4178-9537-6be422c81824 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquiring lock "0a50d7c8-4079-431b-bffa-b9e95b3a4cef" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2755.615087] env[61649]: WARNING oslo_vmware.rw_handles [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2755.615087] env[61649]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2755.615087] env[61649]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2755.615087] env[61649]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2755.615087] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2755.615087] env[61649]: ERROR oslo_vmware.rw_handles response.begin() [ 2755.615087] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2755.615087] env[61649]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2755.615087] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2755.615087] env[61649]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2755.615087] env[61649]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2755.615087] env[61649]: ERROR oslo_vmware.rw_handles [ 2755.615635] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Downloaded image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to 
vmware_temp/24d0bfc5-a854-4e3e-890a-fab1f4941e15/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2755.617430] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Caching image {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2755.617677] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Copying Virtual Disk [datastore1] vmware_temp/24d0bfc5-a854-4e3e-890a-fab1f4941e15/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk to [datastore1] vmware_temp/24d0bfc5-a854-4e3e-890a-fab1f4941e15/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk {{(pid=61649) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2755.617962] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a303a987-c3e5-4140-a94c-d71eb2279cb2 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2755.626462] env[61649]: DEBUG oslo_vmware.api [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Waiting for the task: (returnval){ [ 2755.626462] env[61649]: value = "task-158349" [ 2755.626462] env[61649]: _type = "Task" [ 2755.626462] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2755.633657] env[61649]: DEBUG oslo_vmware.api [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Task: {'id': task-158349, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2756.136626] env[61649]: DEBUG oslo_vmware.exceptions [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Fault InvalidArgument not matched. 
{{(pid=61649) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2756.136885] env[61649]: DEBUG oslo_concurrency.lockutils [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2756.137497] env[61649]: ERROR nova.compute.manager [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2756.137497] env[61649]: Faults: ['InvalidArgument'] [ 2756.137497] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Traceback (most recent call last): [ 2756.137497] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2756.137497] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] yield resources [ 2756.137497] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2756.137497] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] self.driver.spawn(context, instance, image_meta, [ 2756.137497] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2756.137497] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2756.137497] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2756.137497] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] self._fetch_image_if_missing(context, vi) [ 2756.137497] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2756.137858] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] image_cache(vi, tmp_image_ds_loc) [ 2756.137858] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2756.137858] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] vm_util.copy_virtual_disk( [ 2756.137858] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2756.137858] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] session._wait_for_task(vmdk_copy_task) [ 2756.137858] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 2756.137858] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] return self.wait_for_task(task_ref) [ 2756.137858] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2756.137858] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] return evt.wait() [ 2756.137858] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2756.137858] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] result = hub.switch() [ 2756.137858] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2756.137858] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] return self.greenlet.switch() [ 2756.138195] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2756.138195] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] self.f(*self.args, **self.kw) [ 2756.138195] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2756.138195] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] raise exceptions.translate_fault(task_info.error) [ 2756.138195] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2756.138195] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Faults: ['InvalidArgument'] [ 2756.138195] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] [ 2756.138195] env[61649]: INFO nova.compute.manager [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Terminating instance [ 2756.139911] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2756.140206] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2756.140822] env[61649]: DEBUG nova.compute.manager [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 
0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2756.141048] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2756.141294] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4770de39-af74-4eff-9bdf-f60abec53910 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2756.144792] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6e2b76e-cefe-441c-941a-6b189e1ecf33 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2756.151332] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Unregistering the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2756.151534] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8299b2c7-6370-4e94-88c2-df58640ce356 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2756.153602] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2756.153773] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61649) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2756.154656] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6733b10f-6926-41d7-b5ab-7e97b319473a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2756.159062] env[61649]: DEBUG oslo_vmware.api [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Waiting for the task: (returnval){ [ 2756.159062] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52e669c1-3fbb-7a91-cd0d-7987d86f32ce" [ 2756.159062] env[61649]: _type = "Task" [ 2756.159062] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2756.166172] env[61649]: DEBUG oslo_vmware.api [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52e669c1-3fbb-7a91-cd0d-7987d86f32ce, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2756.671275] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] Preparing fetch location {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2756.671682] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Creating directory with path [datastore1] vmware_temp/202e3c75-dba0-46bb-86e5-3bfb2a9e403c/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2756.672035] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-84edd3d9-638d-446f-8e77-9f536de53831 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2756.692773] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Created directory with path [datastore1] vmware_temp/202e3c75-dba0-46bb-86e5-3bfb2a9e403c/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2756.693100] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] Fetch image to [datastore1] vmware_temp/202e3c75-dba0-46bb-86e5-3bfb2a9e403c/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2756.693401] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to [datastore1] vmware_temp/202e3c75-dba0-46bb-86e5-3bfb2a9e403c/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2756.694298] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a49aaa23-d36c-4a6e-af28-914b77b4a4b0 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2756.701058] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1176dbae-40df-478a-9765-4e8d551db39a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2756.709666] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef65dd28-3997-4435-864e-cbfb27161b53 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2756.738807] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3b8bc24-21bd-46fd-9de4-981912b9fb9b {{(pid=61649) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2756.743849] env[61649]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-99874c1e-105a-4c6e-8288-a9dadf96fc57 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2756.764401] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2756.807653] env[61649]: DEBUG oslo_vmware.rw_handles [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/202e3c75-dba0-46bb-86e5-3bfb2a9e403c/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2756.867131] env[61649]: DEBUG oslo_vmware.rw_handles [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Completed reading data from the image iterator. {{(pid=61649) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2756.867371] env[61649]: DEBUG oslo_vmware.rw_handles [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/202e3c75-dba0-46bb-86e5-3bfb2a9e403c/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61649) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2757.225469] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Unregistered the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2757.225748] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Deleting contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2757.225873] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Deleting the datastore file [datastore1] 0a50d7c8-4079-431b-bffa-b9e95b3a4cef {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2757.226154] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4c233790-0cef-47e7-a465-40f5a2078d2d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2757.231918] env[61649]: DEBUG oslo_vmware.api [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Waiting for the task: (returnval){ [ 2757.231918] env[61649]: value = "task-158351" [ 2757.231918] env[61649]: _type = "Task" [ 2757.231918] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2757.239197] env[61649]: DEBUG oslo_vmware.api [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Task: {'id': task-158351, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2757.742126] env[61649]: DEBUG oslo_vmware.api [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Task: {'id': task-158351, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.079662} completed successfully. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2757.742421] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Deleted the datastore file {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2757.742553] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Deleted contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2757.742726] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2757.742898] env[61649]: INFO nova.compute.manager [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Took 1.60 seconds to destroy the instance on the hypervisor. [ 2757.745006] env[61649]: DEBUG nova.compute.claims [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Aborting claim: {{(pid=61649) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 2757.745181] env[61649]: DEBUG oslo_concurrency.lockutils [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2757.745392] env[61649]: DEBUG oslo_concurrency.lockutils [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2757.747189] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 234a5a5fce824c37b5132bea9c07105a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2757.777033] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 234a5a5fce824c37b5132bea9c07105a [ 2757.815405] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f0f7ad2-6f67-4122-bd06-5cf8eac99c0b {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2757.824467] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08b3e63f-3104-4a92-90d2-4d6a47ce4df7 {{(pid=61649) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2757.855327] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0804f820-74d8-43e1-b8e3-b325375fc517 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2757.862519] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9b45874-8f31-4a39-908c-7a05fce2bcbb {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2757.875159] env[61649]: DEBUG nova.compute.provider_tree [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2757.875635] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 8f103ac2c9cc493c9ff56676f1b29433 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2757.883021] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8f103ac2c9cc493c9ff56676f1b29433 [ 2757.883891] env[61649]: DEBUG nova.scheduler.client.report [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2757.886064] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg a4eda42b7db34516a71bef7f4453c4cf in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2757.896173] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a4eda42b7db34516a71bef7f4453c4cf [ 2757.896845] env[61649]: DEBUG oslo_concurrency.lockutils [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.151s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2757.897392] env[61649]: ERROR nova.compute.manager [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2757.897392] env[61649]: Faults: ['InvalidArgument'] [ 2757.897392] env[61649]: ERROR 
nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Traceback (most recent call last): [ 2757.897392] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2757.897392] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] self.driver.spawn(context, instance, image_meta, [ 2757.897392] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2757.897392] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2757.897392] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2757.897392] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] self._fetch_image_if_missing(context, vi) [ 2757.897392] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2757.897392] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] image_cache(vi, tmp_image_ds_loc) [ 2757.897392] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2757.897733] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] vm_util.copy_virtual_disk( [ 2757.897733] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2757.897733] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] session._wait_for_task(vmdk_copy_task) [ 2757.897733] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2757.897733] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] return self.wait_for_task(task_ref) [ 2757.897733] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2757.897733] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] return evt.wait() [ 2757.897733] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2757.897733] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] result = hub.switch() [ 2757.897733] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2757.897733] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] return self.greenlet.switch() [ 2757.897733] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2757.897733] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] self.f(*self.args, **self.kw) [ 2757.898059] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2757.898059] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] raise exceptions.translate_fault(task_info.error) [ 2757.898059] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2757.898059] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Faults: ['InvalidArgument'] [ 2757.898059] env[61649]: ERROR nova.compute.manager [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] [ 2757.898059] env[61649]: DEBUG nova.compute.utils [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] VimFaultException {{(pid=61649) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2757.899662] env[61649]: DEBUG nova.compute.manager [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Build of instance 0a50d7c8-4079-431b-bffa-b9e95b3a4cef was re-scheduled: A specified parameter was not correct: fileType [ 2757.899662] env[61649]: Faults: ['InvalidArgument'] {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2757.900079] env[61649]: DEBUG nova.compute.manager [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Unplugging VIFs for instance {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2757.900264] env[61649]: DEBUG nova.compute.manager [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2757.900435] env[61649]: DEBUG nova.compute.manager [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2757.900600] env[61649]: DEBUG nova.network.neutron [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2758.101731] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 1a31fdd8d9344737a3431585868dbf25 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2758.109987] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1a31fdd8d9344737a3431585868dbf25 [ 2758.110558] env[61649]: DEBUG nova.network.neutron [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2758.111079] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 98661aa700bf49d18f5f8127edb3eb1e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2758.122754] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 98661aa700bf49d18f5f8127edb3eb1e [ 2758.123218] env[61649]: INFO nova.compute.manager [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Took 0.22 seconds to deallocate network for instance. 
[ 2758.124862] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 93ba39a9d7ac47838936bb48633d5ed0 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2758.157460] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 93ba39a9d7ac47838936bb48633d5ed0 [ 2758.160279] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 81e2cc8195214ee4bcb055e39546d764 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2758.188532] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 81e2cc8195214ee4bcb055e39546d764 [ 2758.207065] env[61649]: INFO nova.scheduler.client.report [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Deleted allocations for instance 0a50d7c8-4079-431b-bffa-b9e95b3a4cef [ 2758.213167] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg e2dc50c55cd440c3b885ce3efb76f80c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2758.223081] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e2dc50c55cd440c3b885ce3efb76f80c [ 2758.223561] env[61649]: DEBUG oslo_concurrency.lockutils [None req-94ce8b82-06a6-49c4-828c-6c25f2aa2883 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "0a50d7c8-4079-431b-bffa-b9e95b3a4cef" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 216.478s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2758.223777] env[61649]: DEBUG oslo_concurrency.lockutils [None req-75ca2d8a-1cf3-4178-9537-6be422c81824 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "0a50d7c8-4079-431b-bffa-b9e95b3a4cef" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 20.373s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2758.223983] env[61649]: DEBUG oslo_concurrency.lockutils [None req-75ca2d8a-1cf3-4178-9537-6be422c81824 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquiring lock "0a50d7c8-4079-431b-bffa-b9e95b3a4cef-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2758.224202] env[61649]: DEBUG oslo_concurrency.lockutils [None req-75ca2d8a-1cf3-4178-9537-6be422c81824 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "0a50d7c8-4079-431b-bffa-b9e95b3a4cef-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2758.224360] env[61649]: DEBUG oslo_concurrency.lockutils [None 
req-75ca2d8a-1cf3-4178-9537-6be422c81824 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "0a50d7c8-4079-431b-bffa-b9e95b3a4cef-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2758.226141] env[61649]: INFO nova.compute.manager [None req-75ca2d8a-1cf3-4178-9537-6be422c81824 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Terminating instance [ 2758.227788] env[61649]: DEBUG nova.compute.manager [None req-75ca2d8a-1cf3-4178-9537-6be422c81824 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2758.228043] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-75ca2d8a-1cf3-4178-9537-6be422c81824 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2758.228436] env[61649]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3ffc4733-2864-4384-b6c4-d1e1ac42f978 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2758.237032] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acf6144d-b8eb-4a6b-892a-8941ffb2676f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2758.261294] env[61649]: WARNING nova.virt.vmwareapi.vmops [None req-75ca2d8a-1cf3-4178-9537-6be422c81824 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0a50d7c8-4079-431b-bffa-b9e95b3a4cef could not be found. [ 2758.261491] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-75ca2d8a-1cf3-4178-9537-6be422c81824 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2758.261671] env[61649]: INFO nova.compute.manager [None req-75ca2d8a-1cf3-4178-9537-6be422c81824 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Took 0.03 seconds to destroy the instance on the hypervisor. [ 2758.261907] env[61649]: DEBUG oslo.service.loopingcall [None req-75ca2d8a-1cf3-4178-9537-6be422c81824 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2758.262106] env[61649]: DEBUG nova.compute.manager [-] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2758.262204] env[61649]: DEBUG nova.network.neutron [-] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2758.279629] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 5e564ad4597949cc9db81f04619ce9f1 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2758.285075] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5e564ad4597949cc9db81f04619ce9f1 [ 2758.285412] env[61649]: DEBUG nova.network.neutron [-] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2758.285775] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 38f82b9220174c48955449da4d97b67b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2758.292822] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 38f82b9220174c48955449da4d97b67b [ 2758.293242] env[61649]: INFO nova.compute.manager [-] [instance: 0a50d7c8-4079-431b-bffa-b9e95b3a4cef] Took 0.03 seconds to deallocate network for instance. [ 2758.296464] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-75ca2d8a-1cf3-4178-9537-6be422c81824 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg d50ddf7942a84a829ddada082eba38f2 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2758.319605] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d50ddf7942a84a829ddada082eba38f2 [ 2758.333117] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-75ca2d8a-1cf3-4178-9537-6be422c81824 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 1a6254729c4d4c1bb484ed218fdc7508 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2758.366741] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1a6254729c4d4c1bb484ed218fdc7508 [ 2758.369222] env[61649]: DEBUG oslo_concurrency.lockutils [None req-75ca2d8a-1cf3-4178-9537-6be422c81824 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "0a50d7c8-4079-431b-bffa-b9e95b3a4cef" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.145s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2758.369533] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-75ca2d8a-1cf3-4178-9537-6be422c81824 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 92913845fa494524bc5d7ce8e42acdbc in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2758.378637] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 92913845fa494524bc5d7ce8e42acdbc [ 2759.417892] env[61649]: DEBUG oslo_concurrency.lockutils [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 
tempest-DeleteServersTestJSON-713044590-project-member] Acquiring lock "ba9f78f8-b051-4d42-97dc-5fa9c19753f3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2759.418183] env[61649]: DEBUG oslo_concurrency.lockutils [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "ba9f78f8-b051-4d42-97dc-5fa9c19753f3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2759.418608] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 0132264f2010459ea8a94de3179dd9bc in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2759.427021] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0132264f2010459ea8a94de3179dd9bc [ 2759.427436] env[61649]: DEBUG nova.compute.manager [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2759.429007] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg f7847031076d499996708775ea6b720c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2759.455967] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f7847031076d499996708775ea6b720c [ 2759.515484] env[61649]: DEBUG oslo_concurrency.lockutils [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2759.515484] env[61649]: DEBUG oslo_concurrency.lockutils [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2759.515484] env[61649]: INFO nova.compute.claims [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2759.515484] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 93d41f8365b744489184aa41fa7eb380 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2759.521456] env[61649]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 93d41f8365b744489184aa41fa7eb380 [ 2759.523242] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg f9d09fbb50f74aa894dbe23ba62cce0a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2759.530713] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f9d09fbb50f74aa894dbe23ba62cce0a [ 2759.568547] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6725e619-6ea8-42af-b18e-ce79cea7627f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2759.575669] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdb3f148-e926-4371-ae8a-3b39eca2791f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2759.605504] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06259a74-c9fc-4086-9e92-e96cbbf24ff6 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2759.612111] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc7b1cf1-de58-40e4-a6f8-2f4780e4be9c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2759.624429] env[61649]: DEBUG nova.compute.provider_tree [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2759.624887] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg e54ca5f7e87b4104897e3fdc99b89cfa in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2759.631482] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e54ca5f7e87b4104897e3fdc99b89cfa [ 2759.632322] env[61649]: DEBUG nova.scheduler.client.report [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2759.634424] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 0ac9c171c257496c99487e39dce09f02 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2759.646067] 
env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0ac9c171c257496c99487e39dce09f02 [ 2759.646667] env[61649]: DEBUG oslo_concurrency.lockutils [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.175s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2759.647093] env[61649]: DEBUG nova.compute.manager [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] Start building networks asynchronously for instance. {{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2759.648675] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 3ce69f1d2174445f92866e57490b4852 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2759.681158] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3ce69f1d2174445f92866e57490b4852 [ 2759.682318] env[61649]: DEBUG nova.compute.utils [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Using /dev/sd instead of None {{(pid=61649) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2759.682893] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 335400ed0d9b40d88afe5eb243688cf7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2759.684211] env[61649]: DEBUG nova.compute.manager [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] Allocating IP information in the background. {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2759.684440] env[61649]: DEBUG nova.network.neutron [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] allocate_for_instance() {{(pid=61649) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2759.693384] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 335400ed0d9b40d88afe5eb243688cf7 [ 2759.695936] env[61649]: DEBUG nova.compute.manager [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] Start building block device mappings for instance. 
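
Note: the "Using /dev/sd instead of None" line above comes from device-name selection when the block device mapping requests no prefix. An illustrative, much-simplified version of that fallback (not Nova's actual get_next_device_name):

    import string

    def next_device_name(used, prefix=None):
        # When no prefix is supplied, fall back to /dev/sd, matching the
        # "Using /dev/sd instead of None" entry above.
        prefix = prefix or "/dev/sd"
        for letter in string.ascii_lowercase:
            candidate = prefix + letter
            if candidate not in used:
                return candidate
        raise ValueError("no free device names under %s" % prefix)

    print(next_device_name({"/dev/sda"}))  # -> /dev/sdb
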
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2759.695936] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 70ea78c8737a4d8e98dbc5fae98248d8 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2759.725376] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 70ea78c8737a4d8e98dbc5fae98248d8 [ 2759.727933] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 07fe77c7b99d4f62910b430c033fefd7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2759.730296] env[61649]: DEBUG nova.policy [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4eecfef918474dc8ad298d9eb189f56f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3939f446f6f04aa08a0b91101e55572b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61649) authorize /opt/stack/nova/nova/policy.py:203}} [ 2759.755594] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 07fe77c7b99d4f62910b430c033fefd7 [ 2759.756735] env[61649]: DEBUG nova.compute.manager [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] Start spawning the instance on the hypervisor. 
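
Note: the "Policy check for network:attach_external_network failed" entry above is an oslo.policy authorization against the request credentials. A small sketch with a hypothetical check string ("role:admin" here; Nova's real default differs) showing how a token with only reader/member roles fails such a rule:

    from oslo_config import cfg
    from oslo_policy import policy

    CONF = cfg.CONF
    enforcer = policy.Enforcer(CONF)
    # Hypothetical rule registration; real Nova registers its defaults
    # centrally with a different check string.
    enforcer.register_default(policy.RuleDefault(
        "network:attach_external_network", "role:admin"))

    creds = {"roles": ["reader", "member"],
             "project_id": "3939f446f6f04aa08a0b91101e55572b"}
    # With do_raise=False a failed check returns False instead of raising
    # PolicyNotAuthorized, matching the "Policy check ... failed" line.
    allowed = enforcer.authorize("network:attach_external_network",
                                 {}, creds, do_raise=False)
    print(allowed)  # False
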
{{(pid=61649) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2759.777394] env[61649]: DEBUG nova.virt.hardware [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-04-02T10:03:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-04-02T10:03:42Z,direct_url=,disk_format='vmdk',id=d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d9c1bd4c77004c3cb8e42232cad1896c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-04-02T10:03:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2759.777730] env[61649]: DEBUG nova.virt.hardware [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Flavor limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2759.777954] env[61649]: DEBUG nova.virt.hardware [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Image limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2759.778206] env[61649]: DEBUG nova.virt.hardware [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Flavor pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2759.778417] env[61649]: DEBUG nova.virt.hardware [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Image pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2759.778625] env[61649]: DEBUG nova.virt.hardware [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2759.778893] env[61649]: DEBUG nova.virt.hardware [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2759.779134] env[61649]: DEBUG nova.virt.hardware [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2759.779387] env[61649]: DEBUG nova.virt.hardware [None 
req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Got 1 possible topologies {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2759.779615] env[61649]: DEBUG nova.virt.hardware [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2759.779854] env[61649]: DEBUG nova.virt.hardware [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2759.780788] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-051ea495-33fc-4da0-b902-3c5716e40cb6 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2759.788981] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c34a57d9-ca8b-4864-bc38-2f8a5c9e506f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2760.019087] env[61649]: DEBUG nova.network.neutron [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] Successfully created port: 2fefde10-013f-4ed8-a1b7-947ac0f0d442 {{(pid=61649) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2760.426003] env[61649]: DEBUG nova.compute.manager [req-e359ea5b-43d3-4185-a4f0-c4b2106496b8 req-e8b8e318-8d67-4a40-aad1-c7a748b40152 service nova] [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] Received event network-vif-plugged-2fefde10-013f-4ed8-a1b7-947ac0f0d442 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 2760.426279] env[61649]: DEBUG oslo_concurrency.lockutils [req-e359ea5b-43d3-4185-a4f0-c4b2106496b8 req-e8b8e318-8d67-4a40-aad1-c7a748b40152 service nova] Acquiring lock "ba9f78f8-b051-4d42-97dc-5fa9c19753f3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2760.426570] env[61649]: DEBUG oslo_concurrency.lockutils [req-e359ea5b-43d3-4185-a4f0-c4b2106496b8 req-e8b8e318-8d67-4a40-aad1-c7a748b40152 service nova] Lock "ba9f78f8-b051-4d42-97dc-5fa9c19753f3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2760.426886] env[61649]: DEBUG oslo_concurrency.lockutils [req-e359ea5b-43d3-4185-a4f0-c4b2106496b8 req-e8b8e318-8d67-4a40-aad1-c7a748b40152 service nova] Lock "ba9f78f8-b051-4d42-97dc-5fa9c19753f3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2760.427138] env[61649]: DEBUG nova.compute.manager 
[req-e359ea5b-43d3-4185-a4f0-c4b2106496b8 req-e8b8e318-8d67-4a40-aad1-c7a748b40152 service nova] [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] No waiting events found dispatching network-vif-plugged-2fefde10-013f-4ed8-a1b7-947ac0f0d442 {{(pid=61649) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2760.427369] env[61649]: WARNING nova.compute.manager [req-e359ea5b-43d3-4185-a4f0-c4b2106496b8 req-e8b8e318-8d67-4a40-aad1-c7a748b40152 service nova] [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] Received unexpected event network-vif-plugged-2fefde10-013f-4ed8-a1b7-947ac0f0d442 for instance with vm_state building and task_state spawning. [ 2760.491407] env[61649]: DEBUG nova.network.neutron [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] Successfully updated port: 2fefde10-013f-4ed8-a1b7-947ac0f0d442 {{(pid=61649) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2760.492192] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 050cc9113bab4f95a0dc6f46bfa1d5e7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2760.499568] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 050cc9113bab4f95a0dc6f46bfa1d5e7 [ 2760.500718] env[61649]: DEBUG oslo_concurrency.lockutils [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquiring lock "refresh_cache-ba9f78f8-b051-4d42-97dc-5fa9c19753f3" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2760.501023] env[61649]: DEBUG oslo_concurrency.lockutils [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquired lock "refresh_cache-ba9f78f8-b051-4d42-97dc-5fa9c19753f3" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2760.501242] env[61649]: DEBUG nova.network.neutron [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] Building network info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 2760.501666] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 24ec07c38ff94132ab96d69fd947e314 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2760.510426] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 24ec07c38ff94132ab96d69fd947e314 [ 2760.538828] env[61649]: DEBUG nova.network.neutron [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] Instance cache missing network info. 
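
Note: the pop_instance_event and refresh_cache entries above serialize on named locks derived from the instance UUID. Both lockutils idioms, sketched with the UUID from this log:

    from oslo_concurrency import lockutils

    instance_uuid = "ba9f78f8-b051-4d42-97dc-5fa9c19753f3"

    # Context-manager form, as used around pop_instance_event above:
    with lockutils.lock(instance_uuid + "-events"):
        pass  # pop or record pending external events here

    # Decorator form of the same idea, mirroring the refresh_cache lock name:
    @lockutils.synchronized("refresh_cache-" + instance_uuid)
    def refresh_network_cache():
        pass  # rebuild the instance's network info cache here
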
{{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 2760.670214] env[61649]: DEBUG nova.network.neutron [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] Updating instance_info_cache with network_info: [{"id": "2fefde10-013f-4ed8-a1b7-947ac0f0d442", "address": "fa:16:3e:76:30:80", "network": {"id": "e70549f5-8c32-456f-9488-fdfff63b2dc8", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2020544300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3939f446f6f04aa08a0b91101e55572b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b29df12-5674-476d-a9e5-5e20f704d224", "external-id": "nsx-vlan-transportzone-754", "segmentation_id": 754, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2fefde10-01", "ovs_interfaceid": "2fefde10-013f-4ed8-a1b7-947ac0f0d442", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2760.670835] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 6db9a11e79254dcfbae33759972e3d47 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2760.680654] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6db9a11e79254dcfbae33759972e3d47 [ 2760.681304] env[61649]: DEBUG oslo_concurrency.lockutils [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Releasing lock "refresh_cache-ba9f78f8-b051-4d42-97dc-5fa9c19753f3" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2760.681639] env[61649]: DEBUG nova.compute.manager [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] Instance network_info: |[{"id": "2fefde10-013f-4ed8-a1b7-947ac0f0d442", "address": "fa:16:3e:76:30:80", "network": {"id": "e70549f5-8c32-456f-9488-fdfff63b2dc8", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2020544300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3939f446f6f04aa08a0b91101e55572b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"8b29df12-5674-476d-a9e5-5e20f704d224", "external-id": "nsx-vlan-transportzone-754", "segmentation_id": 754, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2fefde10-01", "ovs_interfaceid": "2fefde10-013f-4ed8-a1b7-947ac0f0d442", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2760.682082] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:76:30:80', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8b29df12-5674-476d-a9e5-5e20f704d224', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2fefde10-013f-4ed8-a1b7-947ac0f0d442', 'vif_model': 'vmxnet3'}] {{(pid=61649) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2760.689740] env[61649]: DEBUG oslo.service.loopingcall [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2760.690465] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] Creating VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2760.690758] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8ef923cc-3a31-4682-abe4-dfce34b4a002 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2760.711751] env[61649]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2760.711751] env[61649]: value = "task-158352" [ 2760.711751] env[61649]: _type = "Task" [ 2760.711751] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2760.720639] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158352, 'name': CreateVM_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2761.221888] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158352, 'name': CreateVM_Task, 'duration_secs': 0.293103} completed successfully. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2761.222175] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] Created VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2761.222965] env[61649]: DEBUG oslo_concurrency.lockutils [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2761.223250] env[61649]: DEBUG oslo_concurrency.lockutils [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2761.223625] env[61649]: DEBUG oslo_concurrency.lockutils [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2761.223926] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55484238-8b00-436a-bdb3-e8b9ff5d5cd2 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2761.228189] env[61649]: DEBUG oslo_vmware.api [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Waiting for the task: (returnval){ [ 2761.228189] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]5242f99c-ab18-bab2-dec1-42540a99ad02" [ 2761.228189] env[61649]: _type = "Task" [ 2761.228189] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2761.235191] env[61649]: DEBUG oslo_vmware.api [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]5242f99c-ab18-bab2-dec1-42540a99ad02, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2761.738701] env[61649]: DEBUG oslo_concurrency.lockutils [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2761.739058] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] Processing image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2761.739315] env[61649]: DEBUG oslo_concurrency.lockutils [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2762.453329] env[61649]: DEBUG nova.compute.manager [req-6ec0f3d3-f6ce-4777-b055-19fbddf46c88 req-d77259ac-9875-4394-9191-54c539b6f576 service nova] [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] Received event network-changed-2fefde10-013f-4ed8-a1b7-947ac0f0d442 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 2762.453604] env[61649]: DEBUG nova.compute.manager [req-6ec0f3d3-f6ce-4777-b055-19fbddf46c88 req-d77259ac-9875-4394-9191-54c539b6f576 service nova] [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] Refreshing instance network info cache due to event network-changed-2fefde10-013f-4ed8-a1b7-947ac0f0d442. 
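
Note: the entries above take and release a lock named after the image under devstack-image-cache_base before probing the datastore: the classic check-then-fetch guard, so concurrent builds download a given image only once. A file-based sketch of the same pattern; fetch and cache_dir are stand-ins for the datastore operations:

    import os
    from oslo_concurrency import lockutils

    def fetch_image_if_missing(image_id, cache_dir, fetch):
        # Lock name mirrors the "[datastore1] devstack-image-cache_base/..."
        # locks in the log above.
        with lockutils.lock("devstack-image-cache_base/" + image_id):
            path = os.path.join(cache_dir, image_id + ".vmdk")
            if not os.path.exists(path):   # cache miss: fetch exactly once
                fetch(image_id, path)
            return path
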
{{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 2762.453895] env[61649]: DEBUG oslo_concurrency.lockutils [req-6ec0f3d3-f6ce-4777-b055-19fbddf46c88 req-d77259ac-9875-4394-9191-54c539b6f576 service nova] Acquiring lock "refresh_cache-ba9f78f8-b051-4d42-97dc-5fa9c19753f3" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2762.454103] env[61649]: DEBUG oslo_concurrency.lockutils [req-6ec0f3d3-f6ce-4777-b055-19fbddf46c88 req-d77259ac-9875-4394-9191-54c539b6f576 service nova] Acquired lock "refresh_cache-ba9f78f8-b051-4d42-97dc-5fa9c19753f3" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2762.454326] env[61649]: DEBUG nova.network.neutron [req-6ec0f3d3-f6ce-4777-b055-19fbddf46c88 req-d77259ac-9875-4394-9191-54c539b6f576 service nova] [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] Refreshing network info cache for port 2fefde10-013f-4ed8-a1b7-947ac0f0d442 {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 2762.454842] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-6ec0f3d3-f6ce-4777-b055-19fbddf46c88 req-d77259ac-9875-4394-9191-54c539b6f576 service nova] Expecting reply to msg cfdad468cec14317a2584a58c2192a28 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2762.461956] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cfdad468cec14317a2584a58c2192a28 [ 2762.666161] env[61649]: DEBUG nova.network.neutron [req-6ec0f3d3-f6ce-4777-b055-19fbddf46c88 req-d77259ac-9875-4394-9191-54c539b6f576 service nova] [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] Updated VIF entry in instance network info cache for port 2fefde10-013f-4ed8-a1b7-947ac0f0d442. 
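
Note: the "Updated VIF entry" record above replaces one element of the cached network_info list, keyed by port id, after a network-changed event. A minimal illustration:

    def update_vif_entry(network_info, refreshed_vif):
        # Swap in the refreshed VIF where the id matches; leave others alone.
        return [refreshed_vif if vif["id"] == refreshed_vif["id"] else vif
                for vif in network_info]

    cache = [{"id": "2fefde10-013f-4ed8-a1b7-947ac0f0d442", "active": False}]
    print(update_vif_entry(
        cache, {"id": "2fefde10-013f-4ed8-a1b7-947ac0f0d442", "active": True}))
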
{{(pid=61649) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 2762.666593] env[61649]: DEBUG nova.network.neutron [req-6ec0f3d3-f6ce-4777-b055-19fbddf46c88 req-d77259ac-9875-4394-9191-54c539b6f576 service nova] [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] Updating instance_info_cache with network_info: [{"id": "2fefde10-013f-4ed8-a1b7-947ac0f0d442", "address": "fa:16:3e:76:30:80", "network": {"id": "e70549f5-8c32-456f-9488-fdfff63b2dc8", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2020544300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3939f446f6f04aa08a0b91101e55572b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b29df12-5674-476d-a9e5-5e20f704d224", "external-id": "nsx-vlan-transportzone-754", "segmentation_id": 754, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2fefde10-01", "ovs_interfaceid": "2fefde10-013f-4ed8-a1b7-947ac0f0d442", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2762.667196] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-6ec0f3d3-f6ce-4777-b055-19fbddf46c88 req-d77259ac-9875-4394-9191-54c539b6f576 service nova] Expecting reply to msg 5a26f01c31154552aee83cf107f6dfa8 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2762.675385] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5a26f01c31154552aee83cf107f6dfa8 [ 2762.676046] env[61649]: DEBUG oslo_concurrency.lockutils [req-6ec0f3d3-f6ce-4777-b055-19fbddf46c88 req-d77259ac-9875-4394-9191-54c539b6f576 service nova] Releasing lock "refresh_cache-ba9f78f8-b051-4d42-97dc-5fa9c19753f3" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2768.929650] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2768.930047] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2768.930047] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] CONF.reclaim_instance_interval <= 0, skipping... 
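
Note: the run_periodic_tasks entries here come from oslo.service's periodic-task machinery: tasks are methods tagged with a decorator and invoked on a timer, and _reclaim_queued_deletes bails out early when the interval is unset. A sketch of that shape (SketchManager is illustrative, not Nova's ComputeManager):

    from oslo_config import cfg
    from oslo_service import periodic_task

    CONF = cfg.CONF

    class SketchManager(periodic_task.PeriodicTasks):
        """Illustrative periodic-task host."""

        def __init__(self):
            super().__init__(CONF)

        @periodic_task.periodic_task(spacing=60)
        def _reclaim_queued_deletes(self, context):
            reclaim_interval = 0  # stands in for CONF.reclaim_instance_interval
            if reclaim_interval <= 0:
                # matches "CONF.reclaim_instance_interval <= 0, skipping..."
                return

    # A service drives this on a timer via:
    # SketchManager().run_periodic_tasks(context=None)
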
{{(pid=61649) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 2770.929100] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2775.923488] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2778.930271] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2779.929094] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2779.929344] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2780.929074] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2780.929495] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Starting heal instance info cache {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 2780.929495] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Rebuilding the list of instances to heal {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 2780.930053] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 722929d7019845b3a3649ff0412d5722 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2780.940840] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 722929d7019845b3a3649ff0412d5722 [ 2780.941890] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2780.942047] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2780.942185] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Didn't find any instances for network info cache update. 
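
Note: _heal_instance_info_cache rebuilds its candidate list and skips instances still building, healing at most one instance per pass; above, both candidates are Building, so nothing is refreshed. The selection logic, roughly:

    def pick_instance_to_heal(instances):
        for inst in instances:
            if inst["vm_state"] == "building":
                continue   # "Skipping network cache update ... it is Building."
            return inst    # heal this one now
        return None        # "Didn't find any instances for network info cache update."

    print(pick_instance_to_heal([{"uuid": "9c1ab5e2", "vm_state": "building"},
                                 {"uuid": "ba9f78f8", "vm_state": "building"}]))
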
{{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 2780.942633] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2780.942923] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 57edc4f242d449758e22a3342790b8b7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2780.952025] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 57edc4f242d449758e22a3342790b8b7 [ 2780.952025] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2780.952243] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2780.952411] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2780.952565] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61649) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2780.953861] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e147cd3-176e-4128-95e3-b957bbb81897 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2780.962441] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aabf6abd-e142-46c1-8726-0f3b403b9d89 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2780.976383] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28313be2-f9c2-4973-a946-0fafdc416677 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2780.982464] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdfc2de2-5467-407c-9912-d4ee770a11a0 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2781.011815] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181822MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61649) _report_hypervisor_resource_view 
/opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2781.011960] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2781.012171] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2781.012928] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 31fdc2ac3cba4b36a22f453ba9e85abf in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2781.026232] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 31fdc2ac3cba4b36a22f453ba9e85abf [ 2781.027633] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg f24ea89a36c14062b6f5ab40982e1445 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2781.035851] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f24ea89a36c14062b6f5ab40982e1445 [ 2781.052031] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 9c1ab5e2-d95e-46b3-9f98-835606a81b57 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2781.052031] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance ba9f78f8-b051-4d42-97dc-5fa9c19753f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
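
Note: the "Final resource view" reported just below (used_ram=768MB, used_disk=2GB, used_vcpus=2) is straightforward arithmetic over these two placement allocations plus the 512 MB host reservation from the inventory data. A quick check:

    reserved_mb = 512
    allocations = [
        {"DISK_GB": 1, "MEMORY_MB": 128, "VCPU": 1},  # instance 9c1ab5e2-...
        {"DISK_GB": 1, "MEMORY_MB": 128, "VCPU": 1},  # instance ba9f78f8-...
    ]
    used_ram_mb = reserved_mb + sum(a["MEMORY_MB"] for a in allocations)
    used_disk_gb = sum(a["DISK_GB"] for a in allocations)
    used_vcpus = sum(a["VCPU"] for a in allocations)
    print(used_ram_mb, used_disk_gb, used_vcpus)  # 768 2 2, as in the log
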
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2781.052031] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2781.052031] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=768MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2781.087516] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5200c85-7142-46f2-a060-b564692610e3 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2781.094619] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4cb1045-f072-45a1-87c5-361e19505292 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2781.123399] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16be6d4b-084c-4ba9-a0c0-2721e8ce3f3d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2781.130054] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-018963ae-c09e-498a-a448-2464d8f615d6 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2781.142580] env[61649]: DEBUG nova.compute.provider_tree [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2781.143004] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg d4597119e3b645599be8191e5cfff496 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2781.149595] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d4597119e3b645599be8191e5cfff496 [ 2781.150445] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2781.152510] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 1d3e09eba85a43a9a5b4a1543f8e9a9d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2781.166998] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1d3e09eba85a43a9a5b4a1543f8e9a9d [ 
2781.167601] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61649) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2781.167777] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.156s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2806.506778] env[61649]: WARNING oslo_vmware.rw_handles [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2806.506778] env[61649]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2806.506778] env[61649]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2806.506778] env[61649]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2806.506778] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2806.506778] env[61649]: ERROR oslo_vmware.rw_handles response.begin() [ 2806.506778] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2806.506778] env[61649]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2806.506778] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2806.506778] env[61649]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2806.506778] env[61649]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2806.506778] env[61649]: ERROR oslo_vmware.rw_handles [ 2806.507389] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] Downloaded image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to vmware_temp/202e3c75-dba0-46bb-86e5-3bfb2a9e403c/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2806.509401] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] Caching image {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2806.509622] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Copying Virtual Disk [datastore1] vmware_temp/202e3c75-dba0-46bb-86e5-3bfb2a9e403c/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk to [datastore1] vmware_temp/202e3c75-dba0-46bb-86e5-3bfb2a9e403c/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk {{(pid=61649) copy_virtual_disk 
/opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2806.509921] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-df2e9863-c155-48c0-9fdb-3c302c1ab89b {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2806.519540] env[61649]: DEBUG oslo_vmware.api [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Waiting for the task: (returnval){ [ 2806.519540] env[61649]: value = "task-158353" [ 2806.519540] env[61649]: _type = "Task" [ 2806.519540] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2806.527152] env[61649]: DEBUG oslo_vmware.api [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Task: {'id': task-158353, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2807.030103] env[61649]: DEBUG oslo_vmware.exceptions [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Fault InvalidArgument not matched. {{(pid=61649) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2807.030403] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2807.030956] env[61649]: ERROR nova.compute.manager [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2807.030956] env[61649]: Faults: ['InvalidArgument'] [ 2807.030956] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] Traceback (most recent call last): [ 2807.030956] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2807.030956] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] yield resources [ 2807.030956] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2807.030956] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] self.driver.spawn(context, instance, image_meta, [ 2807.030956] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2807.030956] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2807.030956] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2807.030956] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] self._fetch_image_if_missing(context, vi) [ 2807.030956] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2807.031327] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] image_cache(vi, tmp_image_ds_loc) [ 2807.031327] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2807.031327] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] vm_util.copy_virtual_disk( [ 2807.031327] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2807.031327] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] session._wait_for_task(vmdk_copy_task) [ 2807.031327] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2807.031327] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] return self.wait_for_task(task_ref) [ 2807.031327] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2807.031327] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] return evt.wait() [ 2807.031327] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2807.031327] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] result = hub.switch() [ 2807.031327] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2807.031327] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] return self.greenlet.switch() [ 2807.031675] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2807.031675] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] self.f(*self.args, **self.kw) [ 2807.031675] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2807.031675] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] raise exceptions.translate_fault(task_info.error) [ 2807.031675] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2807.031675] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] Faults: ['InvalidArgument'] [ 2807.031675] 
env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] [ 2807.031675] env[61649]: INFO nova.compute.manager [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] Terminating instance [ 2807.032881] env[61649]: DEBUG oslo_concurrency.lockutils [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2807.033096] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2807.033348] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ec3e6ba8-d693-485a-a5f4-ac1da20d1d51 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2807.035592] env[61649]: DEBUG nova.compute.manager [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2807.035789] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2807.036551] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0996143c-d704-4afe-acc4-6f3637d4fb39 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2807.043503] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] Unregistering the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2807.043743] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-04770a5f-40e4-43e3-b372-771f11b14eac {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2807.045903] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2807.046078] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Folder [datastore1] 
devstack-image-cache_base created. {{(pid=61649) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2807.047041] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-30d3327c-6b4c-41da-8c19-231cbb0d666a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2807.051551] env[61649]: DEBUG oslo_vmware.api [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Waiting for the task: (returnval){ [ 2807.051551] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52e15c25-4eda-b825-28c8-bb82384f0383" [ 2807.051551] env[61649]: _type = "Task" [ 2807.051551] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2807.059385] env[61649]: DEBUG oslo_vmware.api [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52e15c25-4eda-b825-28c8-bb82384f0383, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2807.120461] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] Unregistered the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2807.120719] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] Deleting contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2807.120978] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Deleting the datastore file [datastore1] 9c1ab5e2-d95e-46b3-9f98-835606a81b57 {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2807.121197] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5db0094f-7a53-4287-bc90-4113f999e578 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2807.127769] env[61649]: DEBUG oslo_vmware.api [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Waiting for the task: (returnval){ [ 2807.127769] env[61649]: value = "task-158355" [ 2807.127769] env[61649]: _type = "Task" [ 2807.127769] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2807.135120] env[61649]: DEBUG oslo_vmware.api [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Task: {'id': task-158355, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2807.561771] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] Preparing fetch location {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2807.562082] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Creating directory with path [datastore1] vmware_temp/0e50784b-d4fd-4f67-81b6-1fd56e9b369e/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2807.562324] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-32ed9da7-298b-421c-9d5e-40913107447c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2807.573839] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Created directory with path [datastore1] vmware_temp/0e50784b-d4fd-4f67-81b6-1fd56e9b369e/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2807.573944] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] Fetch image to [datastore1] vmware_temp/0e50784b-d4fd-4f67-81b6-1fd56e9b369e/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2807.574017] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to [datastore1] vmware_temp/0e50784b-d4fd-4f67-81b6-1fd56e9b369e/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2807.574702] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65cbf1d5-c23e-4541-be37-14883ca96715 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2807.581025] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ce74494-138c-4b72-8e9d-0978085470ff {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2807.589979] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30adf60b-043e-4593-9923-f283278acd02 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2807.621138] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-811bdee5-c545-451f-8328-096e86b321fe {{(pid=61649) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2807.626585] env[61649]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-b7638654-35d0-4036-b509-c17fae1eac71 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2807.635209] env[61649]: DEBUG oslo_vmware.api [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Task: {'id': task-158355, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.067698} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2807.635437] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Deleted the datastore file {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2807.635614] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] Deleted contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2807.635780] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2807.636041] env[61649]: INFO nova.compute.manager [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2807.638134] env[61649]: DEBUG nova.compute.claims [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] Aborting claim: {{(pid=61649) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 2807.638299] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2807.638518] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2807.640344] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg f64fb9766da64713a574141976aa5fb1 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2807.656073] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2807.672116] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f64fb9766da64713a574141976aa5fb1 [ 2807.704214] env[61649]: DEBUG oslo_vmware.rw_handles [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0e50784b-d4fd-4f67-81b6-1fd56e9b369e/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2807.763453] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68f0ec7b-4fd2-4cec-8128-0b1dd27abfc5 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2807.768358] env[61649]: DEBUG oslo_vmware.rw_handles [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Completed reading data from the image iterator. 
{{(pid=61649) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2807.768623] env[61649]: DEBUG oslo_vmware.rw_handles [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0e50784b-d4fd-4f67-81b6-1fd56e9b369e/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2807.772292] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c150b8b1-9f46-43f8-b1b8-9fec1ff78f9f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2807.802166] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a16996ee-816d-464f-87c9-55ba62237f26 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2807.808830] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa162c78-4165-48da-8f49-48783e4bcd5d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2807.821383] env[61649]: DEBUG nova.compute.provider_tree [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2807.821851] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg b31d2f5d18bb4e31aef04d0528fd4217 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2807.829109] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b31d2f5d18bb4e31aef04d0528fd4217 [ 2807.830031] env[61649]: DEBUG nova.scheduler.client.report [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2807.832257] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 392dacbf46c541c182a30083f065e352 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2807.842937] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 392dacbf46c541c182a30083f065e352 [ 2807.843619] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d6de8af0-497b-48c0-b731-77fd4a63718a 
tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.205s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2807.844149] env[61649]: ERROR nova.compute.manager [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2807.844149] env[61649]: Faults: ['InvalidArgument'] [ 2807.844149] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] Traceback (most recent call last): [ 2807.844149] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2807.844149] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] self.driver.spawn(context, instance, image_meta, [ 2807.844149] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2807.844149] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2807.844149] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2807.844149] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] self._fetch_image_if_missing(context, vi) [ 2807.844149] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2807.844149] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] image_cache(vi, tmp_image_ds_loc) [ 2807.844149] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2807.844478] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] vm_util.copy_virtual_disk( [ 2807.844478] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2807.844478] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] session._wait_for_task(vmdk_copy_task) [ 2807.844478] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2807.844478] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] return self.wait_for_task(task_ref) [ 2807.844478] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2807.844478] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] return evt.wait() [ 2807.844478] env[61649]: ERROR 
nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2807.844478] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] result = hub.switch() [ 2807.844478] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2807.844478] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] return self.greenlet.switch() [ 2807.844478] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2807.844478] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] self.f(*self.args, **self.kw) [ 2807.844831] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2807.844831] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] raise exceptions.translate_fault(task_info.error) [ 2807.844831] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2807.844831] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] Faults: ['InvalidArgument'] [ 2807.844831] env[61649]: ERROR nova.compute.manager [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] [ 2807.844831] env[61649]: DEBUG nova.compute.utils [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] VimFaultException {{(pid=61649) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2807.846228] env[61649]: DEBUG nova.compute.manager [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] Build of instance 9c1ab5e2-d95e-46b3-9f98-835606a81b57 was re-scheduled: A specified parameter was not correct: fileType [ 2807.846228] env[61649]: Faults: ['InvalidArgument'] {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2807.846592] env[61649]: DEBUG nova.compute.manager [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] Unplugging VIFs for instance {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2807.846763] env[61649]: DEBUG nova.compute.manager [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2807.846931] env[61649]: DEBUG nova.compute.manager [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2807.847094] env[61649]: DEBUG nova.network.neutron [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2808.051780] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 881f5978e5ed44c7bba13ef017c7163f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2808.059764] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 881f5978e5ed44c7bba13ef017c7163f [ 2808.060371] env[61649]: DEBUG nova.network.neutron [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2808.060839] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 549aa5028a044e8893a81831867b283c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2808.075195] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 549aa5028a044e8893a81831867b283c [ 2808.075815] env[61649]: INFO nova.compute.manager [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9c1ab5e2-d95e-46b3-9f98-835606a81b57] Took 0.23 seconds to deallocate network for instance. 
[ 2808.077487] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg f2193b2a1cad4c9c94c5edd96872b5dd in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2808.108351] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f2193b2a1cad4c9c94c5edd96872b5dd [ 2808.110860] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 1472830cb3ae45cba8e573d6cf040b7d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2808.143631] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1472830cb3ae45cba8e573d6cf040b7d [ 2808.159299] env[61649]: INFO nova.scheduler.client.report [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Deleted allocations for instance 9c1ab5e2-d95e-46b3-9f98-835606a81b57 [ 2808.165361] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 66618c7157f241bc97f08775adce40dc in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2808.176070] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 66618c7157f241bc97f08775adce40dc [ 2808.176070] env[61649]: DEBUG oslo_concurrency.lockutils [None req-d6de8af0-497b-48c0-b731-77fd4a63718a tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Lock "9c1ab5e2-d95e-46b3-9f98-835606a81b57" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 99.124s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2809.343212] env[61649]: DEBUG oslo_concurrency.lockutils [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquiring lock "9b274fb5-69da-4af6-9d85-6d372193974f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2809.343505] env[61649]: DEBUG oslo_concurrency.lockutils [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Lock "9b274fb5-69da-4af6-9d85-6d372193974f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2809.343926] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg f7df768d0ba249cdafccc3c1a92661e6 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2809.352018] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f7df768d0ba249cdafccc3c1a92661e6 [ 2809.352466] env[61649]: DEBUG nova.compute.manager [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 
tempest-ServersTestJSON-1529792186-project-member] [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2809.355751] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg b6899597efd24b17bceb5ec076898625 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2809.385921] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b6899597efd24b17bceb5ec076898625 [ 2809.399848] env[61649]: DEBUG oslo_concurrency.lockutils [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2809.400165] env[61649]: DEBUG oslo_concurrency.lockutils [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2809.401605] env[61649]: INFO nova.compute.claims [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2809.403098] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 56269f700f5842c5b7c88363eae6b8be in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2809.432749] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 56269f700f5842c5b7c88363eae6b8be [ 2809.434405] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg d8c14bf30b2e45daba79d5d44d5dbbaa in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2809.441338] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d8c14bf30b2e45daba79d5d44d5dbbaa [ 2809.481771] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05c3dcd3-a4ab-47bf-aedf-4b9720b1b07e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2809.489189] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76698e60-1a66-4f82-b739-d6fc546d9deb {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2809.518156] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc4481c7-0e07-46a6-a880-8026488d9f81 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2809.524783] env[61649]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86a4ea28-f260-43ff-80bd-f87a716daba4 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2809.538049] env[61649]: DEBUG nova.compute.provider_tree [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2809.538516] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 742f487def8648299769479648d826b4 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2809.546073] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 742f487def8648299769479648d826b4 [ 2809.546930] env[61649]: DEBUG nova.scheduler.client.report [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2809.549058] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 74462af1e1464b8787ac4de3a75e0bb8 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2809.560720] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 74462af1e1464b8787ac4de3a75e0bb8 [ 2809.561473] env[61649]: DEBUG oslo_concurrency.lockutils [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.161s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2809.561919] env[61649]: DEBUG nova.compute.manager [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] Start building networks asynchronously for instance. 
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2809.563565] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 00bafe965f274b8c9c11a54604b78481 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2809.589921] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 00bafe965f274b8c9c11a54604b78481 [ 2809.591376] env[61649]: DEBUG nova.compute.utils [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Using /dev/sd instead of None {{(pid=61649) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2809.591928] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 35443838a584422f8858175fe1c2c7a9 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2809.592981] env[61649]: DEBUG nova.compute.manager [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] Allocating IP information in the background. {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2809.593180] env[61649]: DEBUG nova.network.neutron [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] allocate_for_instance() {{(pid=61649) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2809.600122] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 35443838a584422f8858175fe1c2c7a9 [ 2809.600616] env[61649]: DEBUG nova.compute.manager [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] Start building block device mappings for instance. 
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2809.602231] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 5cfb067198c3440fb2be56d0a89a31ab in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2809.630116] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5cfb067198c3440fb2be56d0a89a31ab [ 2809.632580] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 67ac5eedcbeb461b9e9832caeef5e487 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2809.635380] env[61649]: DEBUG nova.policy [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fc5f71ebe35b4863a38dd7606ae87937', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '72501ae7a7dd4f85801c096912a5af36', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61649) authorize /opt/stack/nova/nova/policy.py:203}} [ 2809.670941] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 67ac5eedcbeb461b9e9832caeef5e487 [ 2809.672199] env[61649]: DEBUG nova.compute.manager [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] Start spawning the instance on the hypervisor. 
{{(pid=61649) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2809.694552] env[61649]: DEBUG nova.virt.hardware [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-04-02T10:03:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-04-02T10:03:42Z,direct_url=,disk_format='vmdk',id=d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d9c1bd4c77004c3cb8e42232cad1896c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-04-02T10:03:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2809.694805] env[61649]: DEBUG nova.virt.hardware [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Flavor limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2809.694962] env[61649]: DEBUG nova.virt.hardware [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Image limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2809.695153] env[61649]: DEBUG nova.virt.hardware [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Flavor pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2809.695301] env[61649]: DEBUG nova.virt.hardware [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Image pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2809.695446] env[61649]: DEBUG nova.virt.hardware [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2809.695665] env[61649]: DEBUG nova.virt.hardware [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2809.695853] env[61649]: DEBUG nova.virt.hardware [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2809.696046] env[61649]: DEBUG nova.virt.hardware [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 
tempest-ServersTestJSON-1529792186-project-member] Got 1 possible topologies {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2809.696215] env[61649]: DEBUG nova.virt.hardware [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2809.696390] env[61649]: DEBUG nova.virt.hardware [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2809.697247] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80a0da1a-54cf-4717-9a98-975927550865 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2809.707746] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e525e978-1ea1-44c4-873c-99b4b496705e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2809.884037] env[61649]: DEBUG nova.network.neutron [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] Successfully created port: 29958894-863c-42a4-bb25-c7d121bd6111 {{(pid=61649) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2810.286622] env[61649]: DEBUG nova.compute.manager [req-8c86b3be-e306-433b-8382-ca98250a22c2 req-86f3a2ca-58fe-4f1c-95c5-68d05eb27b25 service nova] [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] Received event network-vif-plugged-29958894-863c-42a4-bb25-c7d121bd6111 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 2810.286861] env[61649]: DEBUG oslo_concurrency.lockutils [req-8c86b3be-e306-433b-8382-ca98250a22c2 req-86f3a2ca-58fe-4f1c-95c5-68d05eb27b25 service nova] Acquiring lock "9b274fb5-69da-4af6-9d85-6d372193974f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2810.287081] env[61649]: DEBUG oslo_concurrency.lockutils [req-8c86b3be-e306-433b-8382-ca98250a22c2 req-86f3a2ca-58fe-4f1c-95c5-68d05eb27b25 service nova] Lock "9b274fb5-69da-4af6-9d85-6d372193974f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2810.287254] env[61649]: DEBUG oslo_concurrency.lockutils [req-8c86b3be-e306-433b-8382-ca98250a22c2 req-86f3a2ca-58fe-4f1c-95c5-68d05eb27b25 service nova] Lock "9b274fb5-69da-4af6-9d85-6d372193974f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2810.287541] env[61649]: DEBUG nova.compute.manager [req-8c86b3be-e306-433b-8382-ca98250a22c2 req-86f3a2ca-58fe-4f1c-95c5-68d05eb27b25 service nova] [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] 
No waiting events found dispatching network-vif-plugged-29958894-863c-42a4-bb25-c7d121bd6111 {{(pid=61649) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2810.287619] env[61649]: WARNING nova.compute.manager [req-8c86b3be-e306-433b-8382-ca98250a22c2 req-86f3a2ca-58fe-4f1c-95c5-68d05eb27b25 service nova] [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] Received unexpected event network-vif-plugged-29958894-863c-42a4-bb25-c7d121bd6111 for instance with vm_state building and task_state spawning. [ 2810.351141] env[61649]: DEBUG nova.network.neutron [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] Successfully updated port: 29958894-863c-42a4-bb25-c7d121bd6111 {{(pid=61649) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2810.351776] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 7808f06d5be3454baca2d46155d1099e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2810.359017] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7808f06d5be3454baca2d46155d1099e [ 2810.359632] env[61649]: DEBUG oslo_concurrency.lockutils [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquiring lock "refresh_cache-9b274fb5-69da-4af6-9d85-6d372193974f" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2810.359766] env[61649]: DEBUG oslo_concurrency.lockutils [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquired lock "refresh_cache-9b274fb5-69da-4af6-9d85-6d372193974f" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2810.359911] env[61649]: DEBUG nova.network.neutron [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] Building network info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 2810.360328] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 2b8d3a65ddd242a995f27daa3478fc24 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2810.366881] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2b8d3a65ddd242a995f27daa3478fc24 [ 2810.420654] env[61649]: DEBUG nova.network.neutron [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] Instance cache missing network info. 
{{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 2810.573987] env[61649]: DEBUG nova.network.neutron [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] Updating instance_info_cache with network_info: [{"id": "29958894-863c-42a4-bb25-c7d121bd6111", "address": "fa:16:3e:45:41:59", "network": {"id": "8c8e2f95-e820-404a-a1c5-63f49f27fa23", "bridge": "br-int", "label": "tempest-ServersTestJSON-1518117450-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72501ae7a7dd4f85801c096912a5af36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "158692b5-b9fb-49e8-9903-e742ffd6c168", "external-id": "nsx-vlan-transportzone-769", "segmentation_id": 769, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29958894-86", "ovs_interfaceid": "29958894-863c-42a4-bb25-c7d121bd6111", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2810.574625] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg b5fd4cf08e4242bda8aba826cc06bc0d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2810.587474] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b5fd4cf08e4242bda8aba826cc06bc0d [ 2810.588038] env[61649]: DEBUG oslo_concurrency.lockutils [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Releasing lock "refresh_cache-9b274fb5-69da-4af6-9d85-6d372193974f" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2810.588306] env[61649]: DEBUG nova.compute.manager [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] Instance network_info: |[{"id": "29958894-863c-42a4-bb25-c7d121bd6111", "address": "fa:16:3e:45:41:59", "network": {"id": "8c8e2f95-e820-404a-a1c5-63f49f27fa23", "bridge": "br-int", "label": "tempest-ServersTestJSON-1518117450-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72501ae7a7dd4f85801c096912a5af36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "158692b5-b9fb-49e8-9903-e742ffd6c168", "external-id": 
"nsx-vlan-transportzone-769", "segmentation_id": 769, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29958894-86", "ovs_interfaceid": "29958894-863c-42a4-bb25-c7d121bd6111", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2810.588687] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:45:41:59', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '158692b5-b9fb-49e8-9903-e742ffd6c168', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '29958894-863c-42a4-bb25-c7d121bd6111', 'vif_model': 'vmxnet3'}] {{(pid=61649) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2810.596496] env[61649]: DEBUG oslo.service.loopingcall [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2810.596932] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] Creating VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2810.597157] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-57592107-4b8b-4ddc-854a-435086f5cea8 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2810.616884] env[61649]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2810.616884] env[61649]: value = "task-158356" [ 2810.616884] env[61649]: _type = "Task" [ 2810.616884] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2810.624021] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158356, 'name': CreateVM_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2811.000334] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 9fce299924324b4abc9fa51c3303d9bb in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2811.009272] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9fce299924324b4abc9fa51c3303d9bb [ 2811.127984] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158356, 'name': CreateVM_Task, 'duration_secs': 0.280447} completed successfully. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2811.128198] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] Created VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2811.128865] env[61649]: DEBUG oslo_concurrency.lockutils [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2811.129033] env[61649]: DEBUG oslo_concurrency.lockutils [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2811.129356] env[61649]: DEBUG oslo_concurrency.lockutils [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2811.129599] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d2f6520-650a-455f-b865-1135fde0c44c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2811.133839] env[61649]: DEBUG oslo_vmware.api [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Waiting for the task: (returnval){ [ 2811.133839] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]524c4f87-ec7e-fbd5-88a5-16a3da8991da" [ 2811.133839] env[61649]: _type = "Task" [ 2811.133839] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2811.141795] env[61649]: DEBUG oslo_vmware.api [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]524c4f87-ec7e-fbd5-88a5-16a3da8991da, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2811.644217] env[61649]: DEBUG oslo_concurrency.lockutils [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2811.644516] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] Processing image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2811.644673] env[61649]: DEBUG oslo_concurrency.lockutils [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2812.313281] env[61649]: DEBUG nova.compute.manager [req-54f0bd3f-b67d-4024-9dd5-f19220e6534e req-e95680b9-290c-48b0-a340-40692164765d service nova] [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] Received event network-changed-29958894-863c-42a4-bb25-c7d121bd6111 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 2812.313485] env[61649]: DEBUG nova.compute.manager [req-54f0bd3f-b67d-4024-9dd5-f19220e6534e req-e95680b9-290c-48b0-a340-40692164765d service nova] [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] Refreshing instance network info cache due to event network-changed-29958894-863c-42a4-bb25-c7d121bd6111. 
{{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 2812.313700] env[61649]: DEBUG oslo_concurrency.lockutils [req-54f0bd3f-b67d-4024-9dd5-f19220e6534e req-e95680b9-290c-48b0-a340-40692164765d service nova] Acquiring lock "refresh_cache-9b274fb5-69da-4af6-9d85-6d372193974f" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2812.313841] env[61649]: DEBUG oslo_concurrency.lockutils [req-54f0bd3f-b67d-4024-9dd5-f19220e6534e req-e95680b9-290c-48b0-a340-40692164765d service nova] Acquired lock "refresh_cache-9b274fb5-69da-4af6-9d85-6d372193974f" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2812.313997] env[61649]: DEBUG nova.network.neutron [req-54f0bd3f-b67d-4024-9dd5-f19220e6534e req-e95680b9-290c-48b0-a340-40692164765d service nova] [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] Refreshing network info cache for port 29958894-863c-42a4-bb25-c7d121bd6111 {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 2812.314476] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-54f0bd3f-b67d-4024-9dd5-f19220e6534e req-e95680b9-290c-48b0-a340-40692164765d service nova] Expecting reply to msg 5a45775e8be74ad79623798437ae1ace in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2812.321525] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5a45775e8be74ad79623798437ae1ace [ 2812.564651] env[61649]: DEBUG nova.network.neutron [req-54f0bd3f-b67d-4024-9dd5-f19220e6534e req-e95680b9-290c-48b0-a340-40692164765d service nova] [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] Updated VIF entry in instance network info cache for port 29958894-863c-42a4-bb25-c7d121bd6111. 
{{(pid=61649) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 2812.565028] env[61649]: DEBUG nova.network.neutron [req-54f0bd3f-b67d-4024-9dd5-f19220e6534e req-e95680b9-290c-48b0-a340-40692164765d service nova] [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] Updating instance_info_cache with network_info: [{"id": "29958894-863c-42a4-bb25-c7d121bd6111", "address": "fa:16:3e:45:41:59", "network": {"id": "8c8e2f95-e820-404a-a1c5-63f49f27fa23", "bridge": "br-int", "label": "tempest-ServersTestJSON-1518117450-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72501ae7a7dd4f85801c096912a5af36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "158692b5-b9fb-49e8-9903-e742ffd6c168", "external-id": "nsx-vlan-transportzone-769", "segmentation_id": 769, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29958894-86", "ovs_interfaceid": "29958894-863c-42a4-bb25-c7d121bd6111", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2812.565612] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-54f0bd3f-b67d-4024-9dd5-f19220e6534e req-e95680b9-290c-48b0-a340-40692164765d service nova] Expecting reply to msg 8d0aaa118b4a4b9d81eccb810fce6be5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2812.573777] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8d0aaa118b4a4b9d81eccb810fce6be5 [ 2812.574359] env[61649]: DEBUG oslo_concurrency.lockutils [req-54f0bd3f-b67d-4024-9dd5-f19220e6534e req-e95680b9-290c-48b0-a340-40692164765d service nova] Releasing lock "refresh_cache-9b274fb5-69da-4af6-9d85-6d372193974f" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2829.157639] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2829.157927] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61649) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 2830.929378] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2830.929834] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2836.925285] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2840.928610] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2840.929007] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Starting heal instance info cache {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 2840.929007] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Rebuilding the list of instances to heal {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 2840.929746] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg f122c0815d62434f9b04bb9ca0f155e6 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2840.940185] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f122c0815d62434f9b04bb9ca0f155e6 [ 2840.941250] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2840.941400] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2840.941530] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Didn't find any instances for network info cache update. 
{{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 2840.941967] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2840.942150] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2841.929784] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2842.929083] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2842.929427] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg bf251db26f0d49518c94bc6fdab5bc43 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2842.938198] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bf251db26f0d49518c94bc6fdab5bc43 [ 2842.939168] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2842.939388] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2842.939563] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2842.939717] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61649) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2842.940990] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de6c4265-fa4d-4084-b016-68e0d52c1c02 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2842.949552] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6fb7362-fd13-482b-b63b-b9c8e4f63bca {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
2842.963203] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7fc2daa-1592-487a-96e2-60d14b47ce89 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2842.968969] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06c26bbe-5b04-48b0-8981-277d16b77ebd {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2842.996868] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181823MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61649) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2842.997006] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2842.997188] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2842.997971] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg e151a93a67d04516a0641948a1d15a63 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2843.011579] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e151a93a67d04516a0641948a1d15a63 [ 2843.013030] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg ea8e767420d34088bbe928520c84c0f6 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2843.020900] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ea8e767420d34088bbe928520c84c0f6 [ 2843.035985] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance ba9f78f8-b051-4d42-97dc-5fa9c19753f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2843.036151] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 9b274fb5-69da-4af6-9d85-6d372193974f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2843.036329] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2843.036468] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=768MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2843.072032] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-181d5d8a-13fd-4d39-a348-9b539ae3965d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2843.078240] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2164c745-1aee-42ff-abfa-d065cce0d40d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2843.107025] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c1f4142-b38e-44ac-bd59-616326f78fed {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2843.113536] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f187104-a698-41c2-bd39-1bf129dcc305 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2843.127242] env[61649]: DEBUG nova.compute.provider_tree [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2843.127661] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 28ea5369a72842099bd3c75c4e119cc9 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2843.134946] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 28ea5369a72842099bd3c75c4e119cc9 [ 2843.135756] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2843.137844] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 0cd4e4ab64b04f2a8f2a938fac2b128e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2843.147775] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0cd4e4ab64b04f2a8f2a938fac2b128e [ 
2843.148385] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61649) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2843.148560] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.151s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2848.144602] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2848.145258] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg caebdf51b6e7400186c42b5977ed780d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2848.155535] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg caebdf51b6e7400186c42b5977ed780d [ 2856.253300] env[61649]: WARNING oslo_vmware.rw_handles [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2856.253300] env[61649]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2856.253300] env[61649]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2856.253300] env[61649]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2856.253300] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2856.253300] env[61649]: ERROR oslo_vmware.rw_handles response.begin() [ 2856.253300] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2856.253300] env[61649]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2856.253300] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2856.253300] env[61649]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2856.253300] env[61649]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2856.253300] env[61649]: ERROR oslo_vmware.rw_handles [ 2856.253956] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] Downloaded image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to vmware_temp/0e50784b-d4fd-4f67-81b6-1fd56e9b369e/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2856.255770] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 
tempest-DeleteServersTestJSON-713044590-project-member] [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] Caching image {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2856.256040] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Copying Virtual Disk [datastore1] vmware_temp/0e50784b-d4fd-4f67-81b6-1fd56e9b369e/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk to [datastore1] vmware_temp/0e50784b-d4fd-4f67-81b6-1fd56e9b369e/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk {{(pid=61649) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2856.256335] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-91938d02-f792-4219-95ce-2415a9911e78 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2856.265305] env[61649]: DEBUG oslo_vmware.api [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Waiting for the task: (returnval){ [ 2856.265305] env[61649]: value = "task-158357" [ 2856.265305] env[61649]: _type = "Task" [ 2856.265305] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2856.273819] env[61649]: DEBUG oslo_vmware.api [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Task: {'id': task-158357, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2856.775643] env[61649]: DEBUG oslo_vmware.exceptions [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Fault InvalidArgument not matched. 
{{(pid=61649) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2856.775930] env[61649]: DEBUG oslo_concurrency.lockutils [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2856.776680] env[61649]: ERROR nova.compute.manager [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2856.776680] env[61649]: Faults: ['InvalidArgument'] [ 2856.776680] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] Traceback (most recent call last): [ 2856.776680] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2856.776680] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] yield resources [ 2856.776680] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2856.776680] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] self.driver.spawn(context, instance, image_meta, [ 2856.776680] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2856.776680] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2856.776680] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2856.776680] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] self._fetch_image_if_missing(context, vi) [ 2856.776680] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2856.777026] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] image_cache(vi, tmp_image_ds_loc) [ 2856.777026] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2856.777026] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] vm_util.copy_virtual_disk( [ 2856.777026] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2856.777026] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] session._wait_for_task(vmdk_copy_task) [ 2856.777026] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 2856.777026] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] return self.wait_for_task(task_ref) [ 2856.777026] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2856.777026] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] return evt.wait() [ 2856.777026] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2856.777026] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] result = hub.switch() [ 2856.777026] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2856.777026] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] return self.greenlet.switch() [ 2856.777354] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2856.777354] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] self.f(*self.args, **self.kw) [ 2856.777354] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2856.777354] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] raise exceptions.translate_fault(task_info.error) [ 2856.777354] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2856.777354] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] Faults: ['InvalidArgument'] [ 2856.777354] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] [ 2856.777354] env[61649]: INFO nova.compute.manager [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] Terminating instance [ 2856.778610] env[61649]: DEBUG oslo_concurrency.lockutils [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2856.778806] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2856.779051] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e8bc0cd5-4e83-49a5-bef7-0679d40153b2 {{(pid=61649) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2856.781389] env[61649]: DEBUG nova.compute.manager [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2856.781572] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2856.782299] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55975c81-a0c5-45af-bb0d-709eca50e632 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2856.789283] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] Unregistering the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2856.789572] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c4391993-1dca-4ed3-bbe1-b97588494ffd {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2856.792249] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2856.792365] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61649) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2856.793323] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-554ec89d-246c-414e-b868-5997c46da437 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2856.798554] env[61649]: DEBUG oslo_vmware.api [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Waiting for the task: (returnval){ [ 2856.798554] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]5216cb67-dd19-a4ae-ac5c-9b99ea910b90" [ 2856.798554] env[61649]: _type = "Task" [ 2856.798554] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2856.809404] env[61649]: DEBUG oslo_vmware.api [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]5216cb67-dd19-a4ae-ac5c-9b99ea910b90, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2856.867351] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] Unregistered the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2856.867729] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] Deleting contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2856.868073] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Deleting the datastore file [datastore1] ba9f78f8-b051-4d42-97dc-5fa9c19753f3 {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2856.868466] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dcf62f7d-797f-449f-b2f1-66fc1eb27816 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2856.876674] env[61649]: DEBUG oslo_vmware.api [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Waiting for the task: (returnval){ [ 2856.876674] env[61649]: value = "task-158359" [ 2856.876674] env[61649]: _type = "Task" [ 2856.876674] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2856.885866] env[61649]: DEBUG oslo_vmware.api [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Task: {'id': task-158359, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2857.308728] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] Preparing fetch location {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2857.309129] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Creating directory with path [datastore1] vmware_temp/12529819-7855-47ef-b673-33601070fed6/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2857.309228] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d4e07359-be57-47ff-beba-2dcbce83f310 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2857.321447] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Created directory with path [datastore1] vmware_temp/12529819-7855-47ef-b673-33601070fed6/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2857.321638] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] Fetch image to [datastore1] vmware_temp/12529819-7855-47ef-b673-33601070fed6/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2857.321807] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to [datastore1] vmware_temp/12529819-7855-47ef-b673-33601070fed6/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2857.322568] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1fc9d93-e961-499f-930e-f8800a51cce2 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2857.329132] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4815346-ae1e-4482-ab77-bf12a4cd44cc {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2857.338335] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eab3f493-d6e2-4a5c-b2e5-e45ec3d20b06 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2857.369463] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eec81a52-fdff-4a62-97ea-92323f5b01a8 {{(pid=61649) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2857.375440] env[61649]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d7fd3d24-3e0e-4702-a2bf-0cc8321eb952 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2857.384673] env[61649]: DEBUG oslo_vmware.api [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Task: {'id': task-158359, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.073909} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2857.384909] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Deleted the datastore file {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2857.385132] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] Deleted contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2857.385314] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2857.385490] env[61649]: INFO nova.compute.manager [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] Took 0.60 seconds to destroy the instance on the hypervisor. 
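Annotation. The records above trace the cleanup path after the failed spawn: VirtualMachine.UnregisterVM, then FileManager.DeleteDatastoreFile_Task, then a poll loop that reports progress (0% ... duration_secs: 0.073909) until the task completes. That invoke-then-wait shape is the public oslo.vmware session pattern that nova's ds_util.file_delete sits on top of. Below is a minimal sketch of that pattern, assuming a reachable vCenter; the host, credentials, and datacenter moref value are illustrative placeholders and are not taken from this log (only the datastore path is).

    # Minimal sketch of the invoke-then-wait pattern seen in the records above.
    # Placeholders (assumptions, not from the log): vCenter host, credentials,
    # and the 'datacenter-2' moref value.
    from oslo_vmware import api, vim_util

    session = api.VMwareAPISession(
        'vc.example.org',        # placeholder vCenter host
        'user@vsphere.local',    # placeholder username
        'secret',                # placeholder password
        api_retry_count=10,      # retries for transient API failures
        task_poll_interval=0.5,  # seconds between the progress polls logged above
    )

    # FileManager lives in the vSphere ServiceContent; DeleteDatastoreFile_Task
    # is the asynchronous call whose progress the log polls above.
    file_manager = session.vim.service_content.fileManager
    dc_ref = vim_util.get_moref('datacenter-2', 'Datacenter')  # placeholder moref

    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_manager,
        name='[datastore1] ba9f78f8-b051-4d42-97dc-5fa9c19753f3',  # path from the log
        datacenter=dc_ref,
    )

    # wait_for_task() blocks, polling the task every task_poll_interval seconds,
    # and raises a translated fault if the task errors -- e.g. the
    # VimFaultException with Faults: ['InvalidArgument'] in the spawn
    # traceback earlier in this log.
    session.wait_for_task(task)

The same two calls (invoke_api + wait_for_task) underlie the CreateVM_Task and CopyVirtualDisk_Task waits recorded above; only the managed object and method name change.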
[ 2857.387608] env[61649]: DEBUG nova.compute.claims [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] Aborting claim: {{(pid=61649) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 2857.387781] env[61649]: DEBUG oslo_concurrency.lockutils [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2857.387997] env[61649]: DEBUG oslo_concurrency.lockutils [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2857.389918] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg f891c172d1bd436abc599c1233a9ca34 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2857.399622] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2857.422996] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f891c172d1bd436abc599c1233a9ca34 [ 2857.471289] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f43f4295-dde2-486d-b28b-a05116605ce9 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2857.480228] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-377e49c1-90e5-4824-9d08-215895671ad3 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2857.513594] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fae6c236-9b7b-4d24-b936-461cd4281d84 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2857.520923] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bd1f8cd-08ce-4721-bde7-243b6524b403 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2857.535239] env[61649]: DEBUG nova.compute.provider_tree [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2857.535741] env[61649]: INFO 
oslo_messaging._drivers.amqpdriver [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 24de0a649edd491193a3da26b94b170d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2857.544081] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 24de0a649edd491193a3da26b94b170d [ 2857.545052] env[61649]: DEBUG nova.scheduler.client.report [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2857.547204] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 5abf6e6a1f6f4ecd8d673d8f9a333dcd in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2857.556421] env[61649]: DEBUG oslo_vmware.rw_handles [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/12529819-7855-47ef-b673-33601070fed6/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61649) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2857.558389] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5abf6e6a1f6f4ecd8d673d8f9a333dcd [ 2857.559243] env[61649]: DEBUG oslo_concurrency.lockutils [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.171s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2857.559741] env[61649]: ERROR nova.compute.manager [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2857.559741] env[61649]: Faults: ['InvalidArgument'] [ 2857.559741] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] Traceback (most recent call last): [ 2857.559741] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2857.559741] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] self.driver.spawn(context, instance, image_meta, [ 2857.559741] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2857.559741] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2857.559741] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2857.559741] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] self._fetch_image_if_missing(context, vi) [ 2857.559741] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2857.559741] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] image_cache(vi, tmp_image_ds_loc) [ 2857.559741] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2857.560336] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] vm_util.copy_virtual_disk( [ 2857.560336] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2857.560336] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] session._wait_for_task(vmdk_copy_task) [ 2857.560336] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2857.560336] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] return 
self.wait_for_task(task_ref) [ 2857.560336] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2857.560336] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] return evt.wait() [ 2857.560336] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2857.560336] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] result = hub.switch() [ 2857.560336] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2857.560336] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] return self.greenlet.switch() [ 2857.560336] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2857.560336] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] self.f(*self.args, **self.kw) [ 2857.560868] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2857.560868] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] raise exceptions.translate_fault(task_info.error) [ 2857.560868] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2857.560868] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] Faults: ['InvalidArgument'] [ 2857.560868] env[61649]: ERROR nova.compute.manager [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] [ 2857.560868] env[61649]: DEBUG nova.compute.utils [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] VimFaultException {{(pid=61649) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2857.562069] env[61649]: DEBUG nova.compute.manager [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] Build of instance ba9f78f8-b051-4d42-97dc-5fa9c19753f3 was re-scheduled: A specified parameter was not correct: fileType [ 2857.562069] env[61649]: Faults: ['InvalidArgument'] {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2857.562464] env[61649]: DEBUG nova.compute.manager [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] Unplugging VIFs for instance {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2857.562632] env[61649]: DEBUG nova.compute.manager [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 
tempest-DeleteServersTestJSON-713044590-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2857.562834] env[61649]: DEBUG nova.compute.manager [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2857.562979] env[61649]: DEBUG nova.network.neutron [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2857.622961] env[61649]: DEBUG oslo_vmware.rw_handles [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Completed reading data from the image iterator. {{(pid=61649) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2857.623190] env[61649]: DEBUG oslo_vmware.rw_handles [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/12529819-7855-47ef-b673-33601070fed6/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2857.840879] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 18de69098a784e26946d80fba23c3fef in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2857.849370] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 18de69098a784e26946d80fba23c3fef [ 2857.849931] env[61649]: DEBUG nova.network.neutron [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2857.850512] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg a4c97523cd65472a8dcff224aeb3d412 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2857.859952] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a4c97523cd65472a8dcff224aeb3d412 [ 2857.860585] env[61649]: INFO nova.compute.manager [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] [instance: ba9f78f8-b051-4d42-97dc-5fa9c19753f3] Took 0.30 seconds to deallocate network for instance. 
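The traceback above bottoms out in oslo_vmware/api.py, where _poll_task raises exceptions.translate_fault(task_info.error) once the CopyVirtualDisk_Task reports an error state. A minimal sketch of that polling pattern, simplified for illustration (the real implementation runs _poll_task under an eventlet loopingcall, as the frames above show; the function name and busy-wait below are illustrative only):

    import time

    from oslo_vmware import exceptions
    from oslo_vmware import vim_util

    def wait_for_task_sketch(session, task_ref, interval=0.5):
        """Poll a vCenter task until it finishes; raise on a task fault."""
        while True:
            # Same property fetch the real _poll_task performs.
            task_info = session.invoke_api(
                vim_util, 'get_object_property', session.vim, task_ref, 'info')
            if task_info.state == 'success':
                return task_info
            if task_info.state == 'error':
                # This is the step that surfaced the VimFaultException
                # "A specified parameter was not correct: fileType" above.
                raise exceptions.translate_fault(task_info.error)
            time.sleep(interval)  # state is 'queued' or 'running': keep polling

On an error state the fault is translated into a VimFaultException carrying the fault list (here ['InvalidArgument']), which the compute manager catches before aborting the claim and re-scheduling the build, as logged above.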
[ 2857.862268] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 4e7eaffc545b48b79edae63d40d57042 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2857.893078] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4e7eaffc545b48b79edae63d40d57042 [ 2857.895882] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 259009eda94a45b5bd4f1e10f1bbb3a6 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2857.924997] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 259009eda94a45b5bd4f1e10f1bbb3a6 [ 2857.942647] env[61649]: INFO nova.scheduler.client.report [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Deleted allocations for instance ba9f78f8-b051-4d42-97dc-5fa9c19753f3 [ 2857.948699] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Expecting reply to msg 8cfff7ce2f204ecc9a7b7ba2a7bf5640 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2857.959093] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8cfff7ce2f204ecc9a7b7ba2a7bf5640 [ 2857.959635] env[61649]: DEBUG oslo_concurrency.lockutils [None req-e2f9f666-b7f1-406b-b058-c32f89f3cd27 tempest-DeleteServersTestJSON-713044590 tempest-DeleteServersTestJSON-713044590-project-member] Lock "ba9f78f8-b051-4d42-97dc-5fa9c19753f3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 98.541s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2863.299351] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Acquiring lock "68dec8d8-a7c8-4c78-a04e-45d7b0cffdef" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2863.299705] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Lock "68dec8d8-a7c8-4c78-a04e-45d7b0cffdef" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2863.300829] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Expecting reply to msg 4054a2b404df435fb21596e4d5acf2df in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2863.312934] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4054a2b404df435fb21596e4d5acf2df [ 2863.313494] env[61649]: DEBUG nova.compute.manager [None 
req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2863.315208] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Expecting reply to msg a8afbb7e852a41d08ce4fb73c955d73b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2863.345480] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a8afbb7e852a41d08ce4fb73c955d73b [ 2863.360578] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2863.360892] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2863.362349] env[61649]: INFO nova.compute.claims [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2863.363948] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Expecting reply to msg 0a8b497fcedf4732a14574c1f71a8f36 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2863.393037] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0a8b497fcedf4732a14574c1f71a8f36 [ 2863.394603] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Expecting reply to msg 98cb86c0a79c45feb1d40d9110c1daf2 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2863.401363] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 98cb86c0a79c45feb1d40d9110c1daf2 [ 2863.438506] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3579b0da-6c42-49c4-9cf5-87db1b227853 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2863.445960] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bce5374-2389-459a-8342-480b569c7ed6 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2863.477534] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4e6b9de-69e5-4f89-838f-a28535454348 {{(pid=61649) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2863.484786] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4762b122-ca19-4daa-8674-a4577b093241 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2863.498315] env[61649]: DEBUG nova.compute.provider_tree [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2863.498792] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Expecting reply to msg 41bd11b970124d76bf5b05fd67a08cd3 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2863.506371] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 41bd11b970124d76bf5b05fd67a08cd3 [ 2863.507270] env[61649]: DEBUG nova.scheduler.client.report [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2863.509466] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Expecting reply to msg 352745b3993c41c6bda45c70e9f60e63 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2863.519543] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 352745b3993c41c6bda45c70e9f60e63 [ 2863.520295] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.159s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2863.520645] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Expecting reply to msg 5757b129843941368900e209f963894a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2863.534490] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5757b129843941368900e209f963894a [ 2863.535237] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Acquiring lock "074d780e-527c-432f-8805-9572ebceb865" by 
"nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2863.535476] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Lock "074d780e-527c-432f-8805-9572ebceb865" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2863.535902] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Expecting reply to msg d61b590641314521977e7596d3dbbf8a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2863.540835] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d61b590641314521977e7596d3dbbf8a [ 2863.541337] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Lock "074d780e-527c-432f-8805-9572ebceb865" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.006s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2863.541881] env[61649]: DEBUG nova.compute.manager [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] Start building networks asynchronously for instance. {{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2863.543899] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Expecting reply to msg eace9f8039b2418b8378edf416a6eb95 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2863.573670] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eace9f8039b2418b8378edf416a6eb95 [ 2863.575243] env[61649]: DEBUG nova.compute.utils [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Using /dev/sd instead of None {{(pid=61649) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2863.575831] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Expecting reply to msg b615961fbaa7426a9c53952a8490ff33 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2863.576636] env[61649]: DEBUG nova.compute.manager [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] Allocating IP information in the background. 
{{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2863.576797] env[61649]: DEBUG nova.network.neutron [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] allocate_for_instance() {{(pid=61649) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2863.593458] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b615961fbaa7426a9c53952a8490ff33 [ 2863.593992] env[61649]: DEBUG nova.compute.manager [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] Start building block device mappings for instance. {{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2863.595690] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Expecting reply to msg 16218d2abc4947bab343ad5c70c950fa in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2863.626396] env[61649]: DEBUG nova.policy [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '419e4a35342640b8babc3b86757fe4ce', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '58cd3d0d383c4942b607b6c0e513f5d4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61649) authorize /opt/stack/nova/nova/policy.py:203}} [ 2863.628498] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 16218d2abc4947bab343ad5c70c950fa [ 2863.631386] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Expecting reply to msg 7fc6d6767ae74fe698dc1af5e5f0e6f2 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2863.669080] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7fc6d6767ae74fe698dc1af5e5f0e6f2 [ 2863.670260] env[61649]: DEBUG nova.compute.manager [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] Start spawning the instance on the hypervisor. 
{{(pid=61649) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2863.691655] env[61649]: DEBUG nova.virt.hardware [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-04-02T10:03:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-04-02T10:03:42Z,direct_url=,disk_format='vmdk',id=d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d9c1bd4c77004c3cb8e42232cad1896c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-04-02T10:03:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2863.691875] env[61649]: DEBUG nova.virt.hardware [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Flavor limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2863.692089] env[61649]: DEBUG nova.virt.hardware [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Image limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2863.692238] env[61649]: DEBUG nova.virt.hardware [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Flavor pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2863.692402] env[61649]: DEBUG nova.virt.hardware [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Image pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2863.692553] env[61649]: DEBUG nova.virt.hardware [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2863.692750] env[61649]: DEBUG nova.virt.hardware [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2863.692902] env[61649]: DEBUG nova.virt.hardware [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2863.693074] env[61649]: DEBUG nova.virt.hardware [None 
req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Got 1 possible topologies {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2863.693231] env[61649]: DEBUG nova.virt.hardware [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2863.693406] env[61649]: DEBUG nova.virt.hardware [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2863.694250] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1393dd7b-27d6-4b47-bc6d-8fc2324ce675 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2863.701983] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d300931c-4e7b-474f-b6ac-8d272953bd0b {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2863.880527] env[61649]: DEBUG nova.network.neutron [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] Successfully created port: 1a212b56-db50-4611-9f31-f95eee629b00 {{(pid=61649) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2864.356501] env[61649]: DEBUG nova.compute.manager [req-4528bd19-46f0-4e48-9a9f-4b1b42eab617 req-5820d382-7361-479f-8b9b-c6ab30c19f81 service nova] [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] Received event network-vif-plugged-1a212b56-db50-4611-9f31-f95eee629b00 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 2864.356770] env[61649]: DEBUG oslo_concurrency.lockutils [req-4528bd19-46f0-4e48-9a9f-4b1b42eab617 req-5820d382-7361-479f-8b9b-c6ab30c19f81 service nova] Acquiring lock "68dec8d8-a7c8-4c78-a04e-45d7b0cffdef-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2864.356922] env[61649]: DEBUG oslo_concurrency.lockutils [req-4528bd19-46f0-4e48-9a9f-4b1b42eab617 req-5820d382-7361-479f-8b9b-c6ab30c19f81 service nova] Lock "68dec8d8-a7c8-4c78-a04e-45d7b0cffdef-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2864.357087] env[61649]: DEBUG oslo_concurrency.lockutils [req-4528bd19-46f0-4e48-9a9f-4b1b42eab617 req-5820d382-7361-479f-8b9b-c6ab30c19f81 service nova] Lock "68dec8d8-a7c8-4c78-a04e-45d7b0cffdef-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2864.357248] env[61649]: DEBUG nova.compute.manager 
[req-4528bd19-46f0-4e48-9a9f-4b1b42eab617 req-5820d382-7361-479f-8b9b-c6ab30c19f81 service nova] [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] No waiting events found dispatching network-vif-plugged-1a212b56-db50-4611-9f31-f95eee629b00 {{(pid=61649) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2864.357403] env[61649]: WARNING nova.compute.manager [req-4528bd19-46f0-4e48-9a9f-4b1b42eab617 req-5820d382-7361-479f-8b9b-c6ab30c19f81 service nova] [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] Received unexpected event network-vif-plugged-1a212b56-db50-4611-9f31-f95eee629b00 for instance with vm_state building and task_state spawning. [ 2864.431028] env[61649]: DEBUG nova.network.neutron [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] Successfully updated port: 1a212b56-db50-4611-9f31-f95eee629b00 {{(pid=61649) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2864.431567] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Expecting reply to msg fbafaa4b00314b5b88718841ed17f651 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2864.438832] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fbafaa4b00314b5b88718841ed17f651 [ 2864.439492] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Acquiring lock "refresh_cache-68dec8d8-a7c8-4c78-a04e-45d7b0cffdef" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2864.439629] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Acquired lock "refresh_cache-68dec8d8-a7c8-4c78-a04e-45d7b0cffdef" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2864.439783] env[61649]: DEBUG nova.network.neutron [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] Building network info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 2864.440216] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Expecting reply to msg b781fcd576864a6bb29e780ecfb9c1c7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2864.446773] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b781fcd576864a6bb29e780ecfb9c1c7 [ 2864.482337] env[61649]: DEBUG nova.network.neutron [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] Instance cache missing network info. 
{{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 2864.616135] env[61649]: DEBUG nova.network.neutron [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] Updating instance_info_cache with network_info: [{"id": "1a212b56-db50-4611-9f31-f95eee629b00", "address": "fa:16:3e:21:2c:74", "network": {"id": "886a1dee-a64b-4dde-8880-3722d2b42ef3", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1629403223-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "58cd3d0d383c4942b607b6c0e513f5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1a212b56-db", "ovs_interfaceid": "1a212b56-db50-4611-9f31-f95eee629b00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2864.616737] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Expecting reply to msg eabaa0a635b44fdda6b6cf741a7ad084 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2864.628339] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eabaa0a635b44fdda6b6cf741a7ad084 [ 2864.628870] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Releasing lock "refresh_cache-68dec8d8-a7c8-4c78-a04e-45d7b0cffdef" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2864.629130] env[61649]: DEBUG nova.compute.manager [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] Instance network_info: |[{"id": "1a212b56-db50-4611-9f31-f95eee629b00", "address": "fa:16:3e:21:2c:74", "network": {"id": "886a1dee-a64b-4dde-8880-3722d2b42ef3", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1629403223-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "58cd3d0d383c4942b607b6c0e513f5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1a212b56-db", "ovs_interfaceid": "1a212b56-db50-4611-9f31-f95eee629b00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2864.629505] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:21:2c:74', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '78340140-126f-4ef8-a340-debaa64da3e5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1a212b56-db50-4611-9f31-f95eee629b00', 'vif_model': 'vmxnet3'}] {{(pid=61649) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2864.636975] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Creating folder: Project (58cd3d0d383c4942b607b6c0e513f5d4). Parent ref: group-v51588. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2864.637452] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-30c1a935-45ae-45a3-b087-96f747f7fd78 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2864.650174] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Created folder: Project (58cd3d0d383c4942b607b6c0e513f5d4) in parent group-v51588. [ 2864.650362] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Creating folder: Instances. Parent ref: group-v51709. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2864.650576] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-27c23710-e9be-4e07-b17e-cfa2663edbce {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2864.659406] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Created folder: Instances in parent group-v51709. [ 2864.659624] env[61649]: DEBUG oslo.service.loopingcall [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2864.659799] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] Creating VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2864.660054] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0f4e996e-18e3-4ed1-a203-6d8ff46fa919 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2864.679083] env[61649]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2864.679083] env[61649]: value = "task-158362" [ 2864.679083] env[61649]: _type = "Task" [ 2864.679083] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2864.686261] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158362, 'name': CreateVM_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2865.189672] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158362, 'name': CreateVM_Task, 'duration_secs': 0.279739} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2865.189840] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] Created VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2865.190527] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2865.190696] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2865.191033] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2865.191289] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47e60a1f-54dc-428b-b86b-ace1f2a04b57 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2865.195737] env[61649]: DEBUG oslo_vmware.api [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Waiting for the task: (returnval){ [ 2865.195737] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]5271f95c-5e96-c06b-33a9-df55e14a994e" [ 2865.195737] env[61649]: _type = "Task" [ 2865.195737] env[61649]: } to complete. 
{{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2865.202953] env[61649]: DEBUG oslo_vmware.api [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]5271f95c-5e96-c06b-33a9-df55e14a994e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2865.706072] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2865.706451] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] Processing image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2865.706581] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2866.389331] env[61649]: DEBUG nova.compute.manager [req-9fa4e433-a252-4648-9ea5-feca5905cf71 req-bfd26ab4-bf71-44b8-981a-284d018579ca service nova] [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] Received event network-changed-1a212b56-db50-4611-9f31-f95eee629b00 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 2866.389523] env[61649]: DEBUG nova.compute.manager [req-9fa4e433-a252-4648-9ea5-feca5905cf71 req-bfd26ab4-bf71-44b8-981a-284d018579ca service nova] [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] Refreshing instance network info cache due to event network-changed-1a212b56-db50-4611-9f31-f95eee629b00. 
{{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 2866.389737] env[61649]: DEBUG oslo_concurrency.lockutils [req-9fa4e433-a252-4648-9ea5-feca5905cf71 req-bfd26ab4-bf71-44b8-981a-284d018579ca service nova] Acquiring lock "refresh_cache-68dec8d8-a7c8-4c78-a04e-45d7b0cffdef" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2866.389874] env[61649]: DEBUG oslo_concurrency.lockutils [req-9fa4e433-a252-4648-9ea5-feca5905cf71 req-bfd26ab4-bf71-44b8-981a-284d018579ca service nova] Acquired lock "refresh_cache-68dec8d8-a7c8-4c78-a04e-45d7b0cffdef" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2866.390065] env[61649]: DEBUG nova.network.neutron [req-9fa4e433-a252-4648-9ea5-feca5905cf71 req-bfd26ab4-bf71-44b8-981a-284d018579ca service nova] [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] Refreshing network info cache for port 1a212b56-db50-4611-9f31-f95eee629b00 {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 2866.390546] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-9fa4e433-a252-4648-9ea5-feca5905cf71 req-bfd26ab4-bf71-44b8-981a-284d018579ca service nova] Expecting reply to msg de20ff646738466a98c71d759bba5a94 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2866.398124] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg de20ff646738466a98c71d759bba5a94 [ 2866.773685] env[61649]: DEBUG nova.network.neutron [req-9fa4e433-a252-4648-9ea5-feca5905cf71 req-bfd26ab4-bf71-44b8-981a-284d018579ca service nova] [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] Updated VIF entry in instance network info cache for port 1a212b56-db50-4611-9f31-f95eee629b00. 
{{(pid=61649) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}}
[ 2866.774023] env[61649]: DEBUG nova.network.neutron [req-9fa4e433-a252-4648-9ea5-feca5905cf71 req-bfd26ab4-bf71-44b8-981a-284d018579ca service nova] [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] Updating instance_info_cache with network_info: [{"id": "1a212b56-db50-4611-9f31-f95eee629b00", "address": "fa:16:3e:21:2c:74", "network": {"id": "886a1dee-a64b-4dde-8880-3722d2b42ef3", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1629403223-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "58cd3d0d383c4942b607b6c0e513f5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1a212b56-db", "ovs_interfaceid": "1a212b56-db50-4611-9f31-f95eee629b00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2866.774535] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-9fa4e433-a252-4648-9ea5-feca5905cf71 req-bfd26ab4-bf71-44b8-981a-284d018579ca service nova] Expecting reply to msg 04d690d5a99748e5a82437a8db7e2744 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2866.783358] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 04d690d5a99748e5a82437a8db7e2744
[ 2866.783924] env[61649]: DEBUG oslo_concurrency.lockutils [req-9fa4e433-a252-4648-9ea5-feca5905cf71 req-bfd26ab4-bf71-44b8-981a-284d018579ca service nova] Releasing lock "refresh_cache-68dec8d8-a7c8-4c78-a04e-45d7b0cffdef" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2890.929730] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2890.930114] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2890.930114] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2890.930278] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61649) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}}
[ 2898.927130] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2901.929335] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2902.928685] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2902.928884] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Starting heal instance info cache {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}}
[ 2902.929022] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Rebuilding the list of instances to heal {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}}
[ 2902.929655] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 3ed9cb6968d348dabeef5f78312dfe5c in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2902.939112] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3ed9cb6968d348dabeef5f78312dfe5c
[ 2902.940167] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}}
[ 2902.940322] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}}
[ 2902.940452] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Didn't find any instances for network info cache update. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}}
[ 2902.940942] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2903.929465] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2903.929707] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2903.930088] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 02e8f5bb6a1e41edad8db0dfce106be8 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2903.938588] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 02e8f5bb6a1e41edad8db0dfce106be8
[ 2903.939516] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2903.939725] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2903.939892] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2903.940085] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61649) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 2903.941184] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c5a654f-d7ed-4683-a85a-993c100221f2 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2903.950959] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-062eea77-1515-4921-931b-361bb716cdb9 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2903.964382] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-268c7c1e-cc57-4e82-9ae7-96e5beaee7af {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2903.970664] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bca414c5-2468-41c9-8739-de8387e94dd8 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2903.998675] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181829MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61649) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 2903.998852] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2903.999002] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2903.999833] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 012bfb10ee7a4f8094325ed7c4b78891 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2904.015571] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 012bfb10ee7a4f8094325ed7c4b78891
[ 2904.017063] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 663c4b3b43054424a2379cdc1f4d7c5f in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2904.026367] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 663c4b3b43054424a2379cdc1f4d7c5f
[ 2904.041589] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 9b274fb5-69da-4af6-9d85-6d372193974f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2904.041738] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2904.041909] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 2904.042048] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=768MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 2904.077004] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c869b4c-67b7-4577-9baf-a6d99deaf908 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2904.083993] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-473ac0d5-eff8-43bc-85f6-43c800a57b07 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2904.113913] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2068eae2-29c8-4eec-9cd2-887c5d90b18d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2904.120256] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53f68b6e-1aee-453b-9759-1054ae455250 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2904.132657] env[61649]: DEBUG nova.compute.provider_tree [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2904.133072] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 1be7d6c39b29438396e885e4a54ada85 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2904.140049] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1be7d6c39b29438396e885e4a54ada85
[ 2904.140226] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 2904.142357] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 3b3dcaf46c42445a91e0838d529ae873 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2904.171618] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3b3dcaf46c42445a91e0838d529ae873
[ 2904.172350] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61649) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 2904.172532] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.174s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2905.593106] env[61649]: WARNING oslo_vmware.rw_handles [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 2905.593106] env[61649]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 2905.593106] env[61649]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 2905.593106] env[61649]: ERROR oslo_vmware.rw_handles self._conn.getresponse()
[ 2905.593106] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 2905.593106] env[61649]: ERROR oslo_vmware.rw_handles response.begin()
[ 2905.593106] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 2905.593106] env[61649]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status()
[ 2905.593106] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 2905.593106] env[61649]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without"
[ 2905.593106] env[61649]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 2905.593106] env[61649]: ERROR oslo_vmware.rw_handles
[ 2905.593764] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] Downloaded image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to vmware_temp/12529819-7855-47ef-b673-33601070fed6/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 2905.595461] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] Caching image {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 2905.595729] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Copying Virtual Disk [datastore1] vmware_temp/12529819-7855-47ef-b673-33601070fed6/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk to [datastore1] vmware_temp/12529819-7855-47ef-b673-33601070fed6/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk {{(pid=61649) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 2905.596013] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0ddf21fb-5e88-4288-8ecb-166ff4fa97de {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2905.605759] env[61649]: DEBUG oslo_vmware.api [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Waiting for the task: (returnval){
[ 2905.605759] env[61649]: value = "task-158363"
[ 2905.605759] env[61649]: _type = "Task"
[ 2905.605759] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2905.613059] env[61649]: DEBUG oslo_vmware.api [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Task: {'id': task-158363, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2906.116193] env[61649]: DEBUG oslo_vmware.exceptions [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Fault InvalidArgument not matched. {{(pid=61649) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 2906.116416] env[61649]: DEBUG oslo_concurrency.lockutils [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2906.117018] env[61649]: ERROR nova.compute.manager [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2906.117018] env[61649]: Faults: ['InvalidArgument']
[ 2906.117018] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] Traceback (most recent call last):
[ 2906.117018] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources
[ 2906.117018] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] yield resources
[ 2906.117018] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 2906.117018] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] self.driver.spawn(context, instance, image_meta,
[ 2906.117018] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 2906.117018] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 2906.117018] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 2906.117018] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] self._fetch_image_if_missing(context, vi)
[ 2906.117018] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 2906.117350] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] image_cache(vi, tmp_image_ds_loc)
[ 2906.117350] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 2906.117350] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] vm_util.copy_virtual_disk(
[ 2906.117350] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 2906.117350] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] session._wait_for_task(vmdk_copy_task)
[ 2906.117350] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 2906.117350] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] return self.wait_for_task(task_ref)
[ 2906.117350] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 2906.117350] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] return evt.wait()
[ 2906.117350] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 2906.117350] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] result = hub.switch()
[ 2906.117350] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 2906.117350] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] return self.greenlet.switch()
[ 2906.117770] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 2906.117770] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] self.f(*self.args, **self.kw)
[ 2906.117770] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 2906.117770] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] raise exceptions.translate_fault(task_info.error)
[ 2906.117770] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2906.117770] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] Faults: ['InvalidArgument']
[ 2906.117770] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f]
[ 2906.117770] env[61649]: INFO nova.compute.manager [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] Terminating instance
[ 2906.119125] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2906.119349] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2906.119673] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e9a8d23a-9d4c-4ff9-b982-8ecd0ee5a0e8 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2906.121726] env[61649]: DEBUG nova.compute.manager [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 2906.121910] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 2906.122678] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efe5f6f4-0976-4a4c-bd2b-41eb3bf62059 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2906.129317] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] Unregistering the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 2906.129565] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-38a33fcb-3580-47c0-aae8-6973c9325772 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2906.132026] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2906.132026] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61649) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 2906.132690] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd48ed8b-48f7-4a62-a461-65ef58bc2bf7 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2906.136840] env[61649]: DEBUG oslo_vmware.api [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Waiting for the task: (returnval){
[ 2906.136840] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]5245faf1-f91c-28de-ba71-5117682f419d"
[ 2906.136840] env[61649]: _type = "Task"
[ 2906.136840] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2906.143623] env[61649]: DEBUG oslo_vmware.api [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]5245faf1-f91c-28de-ba71-5117682f419d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2906.194396] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] Unregistered the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 2906.194635] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] Deleting contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 2906.194807] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Deleting the datastore file [datastore1] 9b274fb5-69da-4af6-9d85-6d372193974f {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 2906.195118] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-df43924e-ae3a-4f3f-a584-7fa4483f57f7 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2906.201993] env[61649]: DEBUG oslo_vmware.api [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Waiting for the task: (returnval){
[ 2906.201993] env[61649]: value = "task-158365"
[ 2906.201993] env[61649]: _type = "Task"
[ 2906.201993] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2906.209555] env[61649]: DEBUG oslo_vmware.api [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Task: {'id': task-158365, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2906.646856] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] Preparing fetch location {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 2906.647255] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Creating directory with path [datastore1] vmware_temp/f64b587d-64eb-4329-800c-47228cdde349/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2906.647355] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5d61f7c9-c03a-4942-ae70-494124af8fe6 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2906.658557] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Created directory with path [datastore1] vmware_temp/f64b587d-64eb-4329-800c-47228cdde349/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2906.658800] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] Fetch image to [datastore1] vmware_temp/f64b587d-64eb-4329-800c-47228cdde349/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 2906.659000] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to [datastore1] vmware_temp/f64b587d-64eb-4329-800c-47228cdde349/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 2906.659742] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d82f5e8-67f6-457b-8505-b38a46196960 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2906.666267] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ce0d501-219b-465f-8258-d2e49a043965 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2906.676022] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92b5f4a2-5927-4642-a6b2-74af9be09637 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2906.707486] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8745ae8a-a0b5-429b-9eb5-ec2219dcc812 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2906.713861] env[61649]: DEBUG oslo_vmware.api [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Task: {'id': task-158365, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.08274} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2906.715229] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Deleted the datastore file {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 2906.715416] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] Deleted contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}}
[ 2906.715589] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 2906.715763] env[61649]: INFO nova.compute.manager [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] Took 0.59 seconds to destroy the instance on the hypervisor.
[ 2906.717486] env[61649]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-9a509023-ef52-4687-98c2-1fac599fba15 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2906.719281] env[61649]: DEBUG nova.compute.claims [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] Aborting claim: {{(pid=61649) abort /opt/stack/nova/nova/compute/claims.py:84}}
[ 2906.719452] env[61649]: DEBUG oslo_concurrency.lockutils [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2906.719660] env[61649]: DEBUG oslo_concurrency.lockutils [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2906.721464] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 7d180255e92a4f1c9f7e49089cc8277d in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2906.740847] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 2906.752607] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7d180255e92a4f1c9f7e49089cc8277d
[ 2906.788371] env[61649]: DEBUG oslo_vmware.rw_handles [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f64b587d-64eb-4329-800c-47228cdde349/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 2906.792393] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ebcee51-b78f-4273-ba27-7e436ad4d594 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2906.851890] env[61649]: DEBUG oslo_vmware.rw_handles [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Completed reading data from the image iterator. {{(pid=61649) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 2906.852102] env[61649]: DEBUG oslo_vmware.rw_handles [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f64b587d-64eb-4329-800c-47228cdde349/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 2906.855773] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beb6194b-d0c3-4bd3-ac75-7f9464aa81ad {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2906.887224] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55ec4820-a09a-4c3d-911f-cb109c407ed5 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2906.894207] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a211080c-f796-4909-a9e0-8c77fa28ea29 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2906.906699] env[61649]: DEBUG nova.compute.provider_tree [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2906.907150] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg b6856228167a4496a4b82584be61e3dc in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2906.914168] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b6856228167a4496a4b82584be61e3dc
[ 2906.914989] env[61649]: DEBUG nova.scheduler.client.report [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 2906.917267] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg dbfc9a304a07488bbae1d0f2003ea887 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2906.929499] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dbfc9a304a07488bbae1d0f2003ea887
[ 2906.930218] env[61649]: DEBUG oslo_concurrency.lockutils [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.210s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2906.930733] env[61649]: ERROR nova.compute.manager [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2906.930733] env[61649]: Faults: ['InvalidArgument']
[ 2906.930733] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] Traceback (most recent call last):
[ 2906.930733] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 2906.930733] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] self.driver.spawn(context, instance, image_meta,
[ 2906.930733] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 2906.930733] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 2906.930733] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 2906.930733] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] self._fetch_image_if_missing(context, vi)
[ 2906.930733] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 2906.930733] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] image_cache(vi, tmp_image_ds_loc)
[ 2906.930733] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 2906.931313] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] vm_util.copy_virtual_disk(
[ 2906.931313] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 2906.931313] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] session._wait_for_task(vmdk_copy_task)
[ 2906.931313] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 2906.931313] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] return self.wait_for_task(task_ref)
[ 2906.931313] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 2906.931313] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] return evt.wait()
[ 2906.931313] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 2906.931313] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] result = hub.switch()
[ 2906.931313] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 2906.931313] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] return self.greenlet.switch()
[ 2906.931313] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 2906.931313] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] self.f(*self.args, **self.kw)
[ 2906.931849] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 2906.931849] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] raise exceptions.translate_fault(task_info.error)
[ 2906.931849] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2906.931849] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] Faults: ['InvalidArgument']
[ 2906.931849] env[61649]: ERROR nova.compute.manager [instance: 9b274fb5-69da-4af6-9d85-6d372193974f]
[ 2906.931849] env[61649]: DEBUG nova.compute.utils [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] VimFaultException {{(pid=61649) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 2906.933087] env[61649]: DEBUG nova.compute.manager [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] Build of instance 9b274fb5-69da-4af6-9d85-6d372193974f was re-scheduled: A specified parameter was not correct: fileType
[ 2906.933087] env[61649]: Faults: ['InvalidArgument'] {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 2906.933465] env[61649]: DEBUG nova.compute.manager [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] Unplugging VIFs for instance {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 2906.933640] env[61649]: DEBUG nova.compute.manager [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}}
[ 2906.933810] env[61649]: DEBUG nova.compute.manager [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 2906.933974] env[61649]: DEBUG nova.network.neutron [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}}
[ 2907.157658] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg ec8ea86c2df7486299f84ba573be85a5 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2907.165209] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ec8ea86c2df7486299f84ba573be85a5
[ 2907.165728] env[61649]: DEBUG nova.network.neutron [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2907.166239] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 0afe4af4588c4c159d8d6210017bb97f in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 2907.175094] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0afe4af4588c4c159d8d6210017bb97f
[ 2907.175717] env[61649]: INFO nova.compute.manager [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 9b274fb5-69da-4af6-9d85-6d372193974f] Took 0.24 seconds to deallocate network for instance.
[ 2907.177394] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 97fd6311cd084cecbb50789c1b79e633 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2907.209117] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 97fd6311cd084cecbb50789c1b79e633 [ 2907.211740] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 526b469bb4914f9fa1495e4e23f52c03 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2907.241609] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 526b469bb4914f9fa1495e4e23f52c03 [ 2907.259421] env[61649]: INFO nova.scheduler.client.report [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Deleted allocations for instance 9b274fb5-69da-4af6-9d85-6d372193974f [ 2907.265466] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg d99693c107054bea9200540f04e6f2d2 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2907.280395] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d99693c107054bea9200540f04e6f2d2 [ 2907.280984] env[61649]: DEBUG oslo_concurrency.lockutils [None req-03dd36fc-14b1-4e15-a4f4-d53d696e4640 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Lock "9b274fb5-69da-4af6-9d85-6d372193974f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 97.937s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2908.648108] env[61649]: DEBUG oslo_concurrency.lockutils [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquiring lock "7c9cff03-a7f6-41fe-b0e2-47260ba304ad" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2908.648386] env[61649]: DEBUG oslo_concurrency.lockutils [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Lock "7c9cff03-a7f6-41fe-b0e2-47260ba304ad" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2908.648818] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg e8f50b103aaa405d8a5defba6b50aaaf in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2908.657488] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e8f50b103aaa405d8a5defba6b50aaaf [ 2908.658131] env[61649]: DEBUG nova.compute.manager [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 
tempest-ServersTestJSON-1529792186-project-member] [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] Starting instance... {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2908.659722] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg e1c2d6bf639c483b93090e2f8d1d9cae in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2908.687098] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e1c2d6bf639c483b93090e2f8d1d9cae [ 2908.702395] env[61649]: DEBUG oslo_concurrency.lockutils [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2908.702642] env[61649]: DEBUG oslo_concurrency.lockutils [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2908.704085] env[61649]: INFO nova.compute.claims [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2908.705581] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg c1338eaa2e3b47bf94468a60f20ad0e1 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2908.733308] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c1338eaa2e3b47bf94468a60f20ad0e1 [ 2908.734737] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg c16a8479f89b40958cf640c2d0e2706e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2908.741017] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c16a8479f89b40958cf640c2d0e2706e [ 2908.776832] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c968f0ef-52d8-4cba-b3f5-afee8bbb6704 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2908.784649] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e0f9d21-af18-4809-b384-7fa90eee5a5b {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2908.814578] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3727fd22-fcee-4c97-a389-6ce0da877b15 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2908.821114] env[61649]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-698991db-0550-4b96-acb5-c1762f496434 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2908.833430] env[61649]: DEBUG nova.compute.provider_tree [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2908.833880] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 02e7ac74afe44970a6a5eb4c497f3d90 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2908.842186] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 02e7ac74afe44970a6a5eb4c497f3d90 [ 2908.842988] env[61649]: DEBUG nova.scheduler.client.report [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2908.845095] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg d3dd7f2dcf6740ecab33eb3efc84e2da in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2908.854542] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d3dd7f2dcf6740ecab33eb3efc84e2da [ 2908.855166] env[61649]: DEBUG oslo_concurrency.lockutils [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.153s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2908.855625] env[61649]: DEBUG nova.compute.manager [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] Start building networks asynchronously for instance. 
[ 2908.857247] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg b592952d1428403fbef0b476c7358fa2 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2908.886176] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b592952d1428403fbef0b476c7358fa2 [ 2908.887663] env[61649]: DEBUG nova.compute.utils [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Using /dev/sd instead of None {{(pid=61649) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2908.888350] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 8b9b83ddd6d4400fad463838e2b778e4 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2908.889206] env[61649]: DEBUG nova.compute.manager [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] Allocating IP information in the background. {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2908.889374] env[61649]: DEBUG nova.network.neutron [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] allocate_for_instance() {{(pid=61649) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2908.896805] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8b9b83ddd6d4400fad463838e2b778e4 [ 2908.897282] env[61649]: DEBUG nova.compute.manager [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] Start building block device mappings for instance.
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2908.898800] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg b39c3e42424e49588a854806a6a96567 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2908.924124] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b39c3e42424e49588a854806a6a96567 [ 2908.926574] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg fcedc75cb0e4470b9c2929eb513423ba in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2908.932074] env[61649]: DEBUG nova.policy [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fc5f71ebe35b4863a38dd7606ae87937', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '72501ae7a7dd4f85801c096912a5af36', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61649) authorize /opt/stack/nova/nova/policy.py:203}} [ 2908.954082] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fcedc75cb0e4470b9c2929eb513423ba [ 2908.955093] env[61649]: DEBUG nova.compute.manager [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] Start spawning the instance on the hypervisor. 
{{(pid=61649) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2908.975230] env[61649]: DEBUG nova.virt.hardware [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-04-02T10:03:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-04-02T10:03:42Z,direct_url=,disk_format='vmdk',id=d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d9c1bd4c77004c3cb8e42232cad1896c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-04-02T10:03:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2908.975461] env[61649]: DEBUG nova.virt.hardware [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Flavor limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2908.975617] env[61649]: DEBUG nova.virt.hardware [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Image limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2908.975798] env[61649]: DEBUG nova.virt.hardware [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Flavor pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2908.975942] env[61649]: DEBUG nova.virt.hardware [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Image pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2908.976162] env[61649]: DEBUG nova.virt.hardware [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2908.976395] env[61649]: DEBUG nova.virt.hardware [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2908.976555] env[61649]: DEBUG nova.virt.hardware [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2908.976722] env[61649]: DEBUG nova.virt.hardware [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 
tempest-ServersTestJSON-1529792186-project-member] Got 1 possible topologies {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2908.976884] env[61649]: DEBUG nova.virt.hardware [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2908.977056] env[61649]: DEBUG nova.virt.hardware [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2908.978240] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34d0c18a-605a-4dce-afb4-74073c442bd2 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2908.985899] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c7350e7-ea8a-4137-b9ea-cf964b5b55b1 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2909.171305] env[61649]: DEBUG nova.network.neutron [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] Successfully created port: 9d7923d0-fd7d-4d88-9b81-a7593c11135c {{(pid=61649) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2909.644397] env[61649]: DEBUG nova.compute.manager [req-bceb18d8-fc51-41e4-874c-d9e367d88843 req-df1a653c-bd16-4ad7-8f7d-6f025dda2474 service nova] [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] Received event network-vif-plugged-9d7923d0-fd7d-4d88-9b81-a7593c11135c {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 2909.644397] env[61649]: DEBUG oslo_concurrency.lockutils [req-bceb18d8-fc51-41e4-874c-d9e367d88843 req-df1a653c-bd16-4ad7-8f7d-6f025dda2474 service nova] Acquiring lock "7c9cff03-a7f6-41fe-b0e2-47260ba304ad-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2909.644397] env[61649]: DEBUG oslo_concurrency.lockutils [req-bceb18d8-fc51-41e4-874c-d9e367d88843 req-df1a653c-bd16-4ad7-8f7d-6f025dda2474 service nova] Lock "7c9cff03-a7f6-41fe-b0e2-47260ba304ad-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2909.644397] env[61649]: DEBUG oslo_concurrency.lockutils [req-bceb18d8-fc51-41e4-874c-d9e367d88843 req-df1a653c-bd16-4ad7-8f7d-6f025dda2474 service nova] Lock "7c9cff03-a7f6-41fe-b0e2-47260ba304ad-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2909.644556] env[61649]: DEBUG nova.compute.manager [req-bceb18d8-fc51-41e4-874c-d9e367d88843 req-df1a653c-bd16-4ad7-8f7d-6f025dda2474 service nova] [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] No waiting events found dispatching network-vif-plugged-9d7923d0-fd7d-4d88-9b81-a7593c11135c {{(pid=61649) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
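
The "No waiting events found" record above (and the "Received unexpected event" warning that follows) is a benign ordering race: Neutron delivered network-vif-plugged before the driver registered a waiter for it, so the dispatcher found nobody to wake. A minimal sketch of the underlying waiter-registry pattern, in plain Python with threading primitives; the class and method names here are illustrative stand-ins, not Nova's actual nova.compute.manager.InstanceEvents:

    import threading

    class InstanceEvents:
        # Illustrative sketch of a per-instance event waiter registry.
        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = {}  # (instance_uuid, event_name) -> threading.Event

        def prepare(self, uuid, name):
            # Called by the code path that intends to block on the event.
            ev = threading.Event()
            with self._lock:
                self._waiters[(uuid, name)] = ev
            return ev

        def dispatch(self, uuid, name):
            # Called when the external notification arrives.
            with self._lock:
                ev = self._waiters.pop((uuid, name), None)
            if ev is None:
                # Nobody registered yet: this branch corresponds to the
                # "No waiting events found ... unexpected event" pair above.
                print(f"unexpected event {name} for {uuid}")
            else:
                ev.set()
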
[ 2909.644556] env[61649]: WARNING nova.compute.manager [req-bceb18d8-fc51-41e4-874c-d9e367d88843 req-df1a653c-bd16-4ad7-8f7d-6f025dda2474 service nova] [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] Received unexpected event network-vif-plugged-9d7923d0-fd7d-4d88-9b81-a7593c11135c for instance with vm_state building and task_state spawning. [ 2909.763442] env[61649]: DEBUG nova.network.neutron [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] Successfully updated port: 9d7923d0-fd7d-4d88-9b81-a7593c11135c {{(pid=61649) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2909.763442] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg c001feb562cc440e91714bada5c5337c in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2909.774036] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c001feb562cc440e91714bada5c5337c [ 2909.775034] env[61649]: DEBUG oslo_concurrency.lockutils [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquiring lock "refresh_cache-7c9cff03-a7f6-41fe-b0e2-47260ba304ad" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2909.775166] env[61649]: DEBUG oslo_concurrency.lockutils [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquired lock "refresh_cache-7c9cff03-a7f6-41fe-b0e2-47260ba304ad" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2909.775310] env[61649]: DEBUG nova.network.neutron [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] Building network info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 2909.775694] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 9a92a4be1c6b473b810628b9b8defa40 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2909.782273] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9a92a4be1c6b473b810628b9b8defa40 [ 2909.812840] env[61649]: DEBUG nova.network.neutron [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] Instance cache missing network info.
{{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 2909.959737] env[61649]: DEBUG nova.network.neutron [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] Updating instance_info_cache with network_info: [{"id": "9d7923d0-fd7d-4d88-9b81-a7593c11135c", "address": "fa:16:3e:f2:62:8c", "network": {"id": "8c8e2f95-e820-404a-a1c5-63f49f27fa23", "bridge": "br-int", "label": "tempest-ServersTestJSON-1518117450-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72501ae7a7dd4f85801c096912a5af36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "158692b5-b9fb-49e8-9903-e742ffd6c168", "external-id": "nsx-vlan-transportzone-769", "segmentation_id": 769, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d7923d0-fd", "ovs_interfaceid": "9d7923d0-fd7d-4d88-9b81-a7593c11135c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2909.960414] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 5528d1b5d727462d823fae46f43abc02 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2909.970346] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5528d1b5d727462d823fae46f43abc02 [ 2909.970940] env[61649]: DEBUG oslo_concurrency.lockutils [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Releasing lock "refresh_cache-7c9cff03-a7f6-41fe-b0e2-47260ba304ad" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2909.971267] env[61649]: DEBUG nova.compute.manager [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] Instance network_info: |[{"id": "9d7923d0-fd7d-4d88-9b81-a7593c11135c", "address": "fa:16:3e:f2:62:8c", "network": {"id": "8c8e2f95-e820-404a-a1c5-63f49f27fa23", "bridge": "br-int", "label": "tempest-ServersTestJSON-1518117450-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72501ae7a7dd4f85801c096912a5af36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "158692b5-b9fb-49e8-9903-e742ffd6c168", "external-id": 
"nsx-vlan-transportzone-769", "segmentation_id": 769, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d7923d0-fd", "ovs_interfaceid": "9d7923d0-fd7d-4d88-9b81-a7593c11135c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2909.971750] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f2:62:8c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '158692b5-b9fb-49e8-9903-e742ffd6c168', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9d7923d0-fd7d-4d88-9b81-a7593c11135c', 'vif_model': 'vmxnet3'}] {{(pid=61649) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2909.978966] env[61649]: DEBUG oslo.service.loopingcall [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2909.979412] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] Creating VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2909.979641] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-76e8c1ef-3969-4217-89a8-24b993628dc2 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2910.002055] env[61649]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2910.002055] env[61649]: value = "task-158366" [ 2910.002055] env[61649]: _type = "Task" [ 2910.002055] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2910.009463] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158366, 'name': CreateVM_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2910.512052] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158366, 'name': CreateVM_Task, 'duration_secs': 0.286991} completed successfully. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2910.512222] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] Created VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2910.512969] env[61649]: DEBUG oslo_concurrency.lockutils [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2910.513137] env[61649]: DEBUG oslo_concurrency.lockutils [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2910.513466] env[61649]: DEBUG oslo_concurrency.lockutils [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2910.513712] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4669590e-b785-4dac-abd4-dcdd446eb02c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2910.517825] env[61649]: DEBUG oslo_vmware.api [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Waiting for the task: (returnval){ [ 2910.517825] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]526f03f6-9396-d0e5-801b-707aac688eeb" [ 2910.517825] env[61649]: _type = "Task" [ 2910.517825] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2910.524814] env[61649]: DEBUG oslo_vmware.api [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]526f03f6-9396-d0e5-801b-707aac688eeb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
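
Every vSphere call in this trace follows the same shape: invoke a *_Task method, then poll the task object until it reports success or error ("progress is 0%." followed later by "completed successfully" with a duration_secs). A minimal sketch of such a poll loop, assuming a get_task_info() callable that mirrors vSphere TaskInfo fields; this illustrates the pattern, it is not oslo.vmware's actual implementation:

    import time

    def wait_for_task(get_task_info, poll_interval=0.5):
        # Poll until the task leaves its running states; oslo.vmware drives
        # the equivalent loop from a looping call and raises a translated
        # fault exception when the task ends in error.
        while True:
            info = get_task_info()  # assumed to expose .state, .progress, .error
            if info.state == 'success':
                return info
            if info.state == 'error':
                raise RuntimeError(info.error)
            print(f"progress is {info.progress}%")
            time.sleep(poll_interval)
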
[ 2911.028311] env[61649]: DEBUG oslo_concurrency.lockutils [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2911.028631] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] Processing image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2911.028770] env[61649]: DEBUG oslo_concurrency.lockutils [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2911.673946] env[61649]: DEBUG nova.compute.manager [req-0ff954e3-8c4c-4f6c-8095-ed283b824c8f req-74fb594f-545b-4d9b-81d4-893a8493e6a0 service nova] [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] Received event network-changed-9d7923d0-fd7d-4d88-9b81-a7593c11135c {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 2911.674286] env[61649]: DEBUG nova.compute.manager [req-0ff954e3-8c4c-4f6c-8095-ed283b824c8f req-74fb594f-545b-4d9b-81d4-893a8493e6a0 service nova] [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] Refreshing instance network info cache due to event network-changed-9d7923d0-fd7d-4d88-9b81-a7593c11135c.
{{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 2911.674611] env[61649]: DEBUG oslo_concurrency.lockutils [req-0ff954e3-8c4c-4f6c-8095-ed283b824c8f req-74fb594f-545b-4d9b-81d4-893a8493e6a0 service nova] Acquiring lock "refresh_cache-7c9cff03-a7f6-41fe-b0e2-47260ba304ad" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2911.674810] env[61649]: DEBUG oslo_concurrency.lockutils [req-0ff954e3-8c4c-4f6c-8095-ed283b824c8f req-74fb594f-545b-4d9b-81d4-893a8493e6a0 service nova] Acquired lock "refresh_cache-7c9cff03-a7f6-41fe-b0e2-47260ba304ad" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2911.675019] env[61649]: DEBUG nova.network.neutron [req-0ff954e3-8c4c-4f6c-8095-ed283b824c8f req-74fb594f-545b-4d9b-81d4-893a8493e6a0 service nova] [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] Refreshing network info cache for port 9d7923d0-fd7d-4d88-9b81-a7593c11135c {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 2911.675592] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-0ff954e3-8c4c-4f6c-8095-ed283b824c8f req-74fb594f-545b-4d9b-81d4-893a8493e6a0 service nova] Expecting reply to msg ed0f7305e7f3460ab3364f7f1625f719 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2911.682347] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ed0f7305e7f3460ab3364f7f1625f719 [ 2912.127069] env[61649]: DEBUG nova.network.neutron [req-0ff954e3-8c4c-4f6c-8095-ed283b824c8f req-74fb594f-545b-4d9b-81d4-893a8493e6a0 service nova] [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] Updated VIF entry in instance network info cache for port 9d7923d0-fd7d-4d88-9b81-a7593c11135c. 
{{(pid=61649) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 2912.127425] env[61649]: DEBUG nova.network.neutron [req-0ff954e3-8c4c-4f6c-8095-ed283b824c8f req-74fb594f-545b-4d9b-81d4-893a8493e6a0 service nova] [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] Updating instance_info_cache with network_info: [{"id": "9d7923d0-fd7d-4d88-9b81-a7593c11135c", "address": "fa:16:3e:f2:62:8c", "network": {"id": "8c8e2f95-e820-404a-a1c5-63f49f27fa23", "bridge": "br-int", "label": "tempest-ServersTestJSON-1518117450-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72501ae7a7dd4f85801c096912a5af36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "158692b5-b9fb-49e8-9903-e742ffd6c168", "external-id": "nsx-vlan-transportzone-769", "segmentation_id": 769, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d7923d0-fd", "ovs_interfaceid": "9d7923d0-fd7d-4d88-9b81-a7593c11135c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2912.127949] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-0ff954e3-8c4c-4f6c-8095-ed283b824c8f req-74fb594f-545b-4d9b-81d4-893a8493e6a0 service nova] Expecting reply to msg 210525dbea054a0ca68f163b3e602e9a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2912.136127] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 210525dbea054a0ca68f163b3e602e9a [ 2912.136687] env[61649]: DEBUG oslo_concurrency.lockutils [req-0ff954e3-8c4c-4f6c-8095-ed283b824c8f req-74fb594f-545b-4d9b-81d4-893a8493e6a0 service nova] Releasing lock "refresh_cache-7c9cff03-a7f6-41fe-b0e2-47260ba304ad" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2921.930358] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._run_image_cache_manager_pass {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2921.930721] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "storage-registry-lock" by "nova.virt.storage_users.register_storage_use..do_register_storage_use" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2921.931410] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "storage-registry-lock" acquired by "nova.virt.storage_users.register_storage_use..do_register_storage_use" :: waited 0.001s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2921.931869] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "storage-registry-lock" "released" by 
"nova.virt.storage_users.register_storage_use..do_register_storage_use" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2921.932134] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "storage-registry-lock" by "nova.virt.storage_users.get_storage_users..do_get_storage_users" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2921.932555] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "storage-registry-lock" acquired by "nova.virt.storage_users.get_storage_users..do_get_storage_users" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2921.932851] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "storage-registry-lock" "released" by "nova.virt.storage_users.get_storage_users..do_get_storage_users" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2921.933386] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 77ffbf41bb4645a887d883e71617367d in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2921.946717] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 77ffbf41bb4645a887d883e71617367d [ 2921.949713] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd79c956-62ca-4376-b9c9-3f1e6acf0a09 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2921.960170] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f2cbd95-f3c0-4061-9002-3a049cb731c7 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2921.973857] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 3cb5ae3f489d4d58b250ecba26fba96b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2921.983456] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3cb5ae3f489d4d58b250ecba26fba96b [ 2921.984665] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20c45676-3e6a-41e3-a16e-8afa4f165cae {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2921.989767] env[61649]: DEBUG oslo_vmware.api [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Waiting for the task: (returnval){ [ 2921.989767] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]5254940e-dfbd-834c-5610-79f75aac7197" [ 2921.989767] env[61649]: _type = "Task" [ 2921.989767] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2922.003265] env[61649]: DEBUG oslo_vmware.api [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]5254940e-dfbd-834c-5610-79f75aac7197, 'name': SearchDatastore_Task} progress is 0%. 
[ 2922.499679] env[61649]: DEBUG oslo_vmware.api [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]5254940e-dfbd-834c-5610-79f75aac7197, 'name': SearchDatastore_Task, 'duration_secs': 0.015902} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2922.500116] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "[datastore1] devstack-image-cache_base/" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2922.500257] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquired lock "[datastore1] devstack-image-cache_base/" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2922.500612] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2922.500866] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3dec1e38-b101-47b5-8661-77287243c947 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2922.505216] env[61649]: DEBUG oslo_vmware.api [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Waiting for the task: (returnval){ [ 2922.505216] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52e85e6e-3f49-a979-5949-8bf550c6842e" [ 2922.505216] env[61649]: _type = "Task" [ 2922.505216] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2922.512410] env[61649]: DEBUG oslo_vmware.api [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52e85e6e-3f49-a979-5949-8bf550c6842e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2923.015923] env[61649]: DEBUG oslo_vmware.api [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52e85e6e-3f49-a979-5949-8bf550c6842e, 'name': SearchDatastore_Task, 'duration_secs': 0.005739} completed successfully.
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2923.016353] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Releasing lock "[datastore1] devstack-image-cache_base/" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2923.016428] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2923.016517] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2923.016784] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2923.017050] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6bf40a93-62fa-48a1-9800-4c1d8d27fb2f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2923.021542] env[61649]: DEBUG oslo_vmware.api [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Waiting for the task: (returnval){ [ 2923.021542] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52a6d6cd-f8e0-e516-16e9-dd409d6e9e16" [ 2923.021542] env[61649]: _type = "Task" [ 2923.021542] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2923.028501] env[61649]: DEBUG oslo_vmware.api [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52a6d6cd-f8e0-e516-16e9-dd409d6e9e16, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2923.531016] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2923.531718] env[61649]: DEBUG oslo_vmware.service [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c19531c-391f-4d1f-a78e-801e790bfff9 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2923.536872] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45f6a3ee-6e8d-413d-9fa3-87bf5ee002ac {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2923.540738] env[61649]: DEBUG oslo_vmware.api [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Waiting for the task: (returnval){ [ 2923.540738] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52dfd501-9909-a6de-6efb-b30319961133" [ 2923.540738] env[61649]: _type = "Task" [ 2923.540738] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2923.548231] env[61649]: DEBUG oslo_vmware.api [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52dfd501-9909-a6de-6efb-b30319961133, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2924.050829] env[61649]: DEBUG oslo_vmware.api [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52dfd501-9909-a6de-6efb-b30319961133, 'name': SearchDatastore_Task, 'duration_secs': 0.014688} completed successfully. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2924.051183] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "[datastore2] devstack-image-cache_base/" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2924.051246] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquired lock "[datastore2] devstack-image-cache_base/" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2924.051550] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2924.051811] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d371e66-8551-4db6-8fb1-c202bbda6284 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2924.055970] env[61649]: DEBUG oslo_vmware.api [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Waiting for the task: (returnval){ [ 2924.055970] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52c18d7c-f432-7670-a336-ab8363ad6fdf" [ 2924.055970] env[61649]: _type = "Task" [ 2924.055970] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2924.062804] env[61649]: DEBUG oslo_vmware.api [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52c18d7c-f432-7670-a336-ab8363ad6fdf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2924.565886] env[61649]: DEBUG oslo_vmware.api [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52c18d7c-f432-7670-a336-ab8363ad6fdf, 'name': SearchDatastore_Task, 'duration_secs': 0.005772} completed successfully. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2924.566148] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Releasing lock "[datastore2] devstack-image-cache_base/" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2924.566376] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "[datastore2] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2924.566498] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquired lock "[datastore2] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2924.566876] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2924.567177] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b4860f4-6abd-4503-88cf-fec0bab6fcb2 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2924.571265] env[61649]: DEBUG oslo_vmware.api [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Waiting for the task: (returnval){ [ 2924.571265] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]523e5e2f-622a-3566-1ead-45bc2bb18f31" [ 2924.571265] env[61649]: _type = "Task" [ 2924.571265] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2924.578456] env[61649]: DEBUG oslo_vmware.api [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]523e5e2f-622a-3566-1ead-45bc2bb18f31, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2925.081283] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Releasing lock "[datastore2] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2931.002139] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 3857f9ac210c4472b2cdd150e4d633a3 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2931.011318] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3857f9ac210c4472b2cdd150e4d633a3 [ 2948.131770] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._sync_power_states {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2948.132485] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg a5981eb6e9ff47839c8f73a84d839124 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2948.144353] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a5981eb6e9ff47839c8f73a84d839124 [ 2948.145499] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Getting list of instances from cluster (obj){ [ 2948.145499] env[61649]: value = "domain-c8" [ 2948.145499] env[61649]: _type = "ClusterComputeResource" [ 2948.145499] env[61649]: } {{(pid=61649) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2948.146767] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a6a16d7-fcab-4514-95d1-1a021d2ccc77 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2948.158870] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Got total of 2 instances {{(pid=61649) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 2948.159031] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Triggering sync for uuid 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef {{(pid=61649) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 2948.159227] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Triggering sync for uuid 7c9cff03-a7f6-41fe-b0e2-47260ba304ad {{(pid=61649) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10327}} [ 2948.159520] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "68dec8d8-a7c8-4c78-a04e-45d7b0cffdef" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2948.159745] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "7c9cff03-a7f6-41fe-b0e2-47260ba304ad" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2950.958041] env[61649]: 
DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2952.928563] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2952.928962] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2952.928962] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61649) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}} [ 2956.418584] env[61649]: WARNING oslo_vmware.rw_handles [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2956.418584] env[61649]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2956.418584] env[61649]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2956.418584] env[61649]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2956.418584] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2956.418584] env[61649]: ERROR oslo_vmware.rw_handles response.begin() [ 2956.418584] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2956.418584] env[61649]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2956.418584] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2956.418584] env[61649]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2956.418584] env[61649]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2956.418584] env[61649]: ERROR oslo_vmware.rw_handles [ 2956.419201] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] Downloaded image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to vmware_temp/f64b587d-64eb-4329-800c-47228cdde349/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2956.421249] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] Caching image {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2956.421492] env[61649]: DEBUG 
nova.virt.vmwareapi.vm_util [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Copying Virtual Disk [datastore1] vmware_temp/f64b587d-64eb-4329-800c-47228cdde349/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk to [datastore1] vmware_temp/f64b587d-64eb-4329-800c-47228cdde349/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk {{(pid=61649) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2956.421766] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8c0c8b97-ccce-4e06-a15c-f10f516f71de {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2956.428988] env[61649]: DEBUG oslo_vmware.api [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Waiting for the task: (returnval){ [ 2956.428988] env[61649]: value = "task-158367" [ 2956.428988] env[61649]: _type = "Task" [ 2956.428988] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2956.436673] env[61649]: DEBUG oslo_vmware.api [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Task: {'id': task-158367, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2956.939304] env[61649]: DEBUG oslo_vmware.exceptions [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Fault InvalidArgument not matched. {{(pid=61649) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
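
"Fault InvalidArgument not matched" records the fault-translation step: when the CopyVirtualDisk_Task ends in error, oslo.vmware looks the vSphere fault name up against its registry of specific exception classes and, finding no entry for InvalidArgument, falls back to the generic VimFaultException that the traceback below carries ("A specified parameter was not correct: fileType"). A minimal sketch of that translate-or-fallback pattern; all names here are illustrative stand-ins, not oslo.vmware's real registry:

    class VimFaultError(Exception):
        # Generic fallback, analogous in role to VimFaultException.
        def __init__(self, message, fault_list):
            super().__init__(message)
            self.fault_list = fault_list

    # Faults with a dedicated exception class; 'InvalidArgument' is
    # deliberately absent, which is the "not matched" case logged above.
    FAULT_CLASSES = {
        'FileNotFound': FileNotFoundError,
    }

    def translate_fault(fault_name, message):
        cls = FAULT_CLASSES.get(fault_name)
        if cls is None:
            print(f"Fault {fault_name} not matched.")
            return VimFaultError(message, [fault_name])
        return cls(message)

    # e.g. raise translate_fault('InvalidArgument',
    #                            'A specified parameter was not correct: fileType')
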
[ 2956.939570] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2956.940296] env[61649]: ERROR nova.compute.manager [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2956.940296] env[61649]: Faults: ['InvalidArgument'] [ 2956.940296] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] Traceback (most recent call last): [ 2956.940296] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2956.940296] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] yield resources [ 2956.940296] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2956.940296] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] self.driver.spawn(context, instance, image_meta, [ 2956.940296] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2956.940296] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2956.940296] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2956.940296] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] self._fetch_image_if_missing(context, vi) [ 2956.940296] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2956.941667] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] image_cache(vi, tmp_image_ds_loc) [ 2956.941667] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2956.941667] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] vm_util.copy_virtual_disk( [ 2956.941667] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2956.941667] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] session._wait_for_task(vmdk_copy_task) [ 2956.941667] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] File "/opt/stack/nova/nova/virt/vmwareapi/session.py",
line 157, in _wait_for_task [ 2956.941667] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] return self.wait_for_task(task_ref) [ 2956.941667] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2956.941667] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] return evt.wait() [ 2956.941667] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2956.941667] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] result = hub.switch() [ 2956.941667] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2956.941667] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] return self.greenlet.switch() [ 2956.942110] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2956.942110] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] self.f(*self.args, **self.kw) [ 2956.942110] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2956.942110] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] raise exceptions.translate_fault(task_info.error) [ 2956.942110] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2956.942110] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] Faults: ['InvalidArgument'] [ 2956.942110] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] [ 2956.942110] env[61649]: INFO nova.compute.manager [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] Terminating instance [ 2956.942684] env[61649]: DEBUG oslo_concurrency.lockutils [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2956.942890] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2956.943633] env[61649]: DEBUG nova.compute.manager [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] [instance: 
68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2956.943819] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2956.944055] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0ac8e4a6-c492-42de-a292-60f741390dce {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2956.946212] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be60846b-1254-4ac9-96b4-b8b1716233ef {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2956.952788] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] Unregistering the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2956.952994] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-50142f62-8b79-4315-bdbd-fb25a839897a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2956.954970] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2956.955139] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61649) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2956.956066] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6db3ebd-a053-4cfa-b0b9-8d211af8e52a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2956.960549] env[61649]: DEBUG oslo_vmware.api [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Waiting for the task: (returnval){ [ 2956.960549] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52603015-49f6-22e7-40e8-23b20dc06592" [ 2956.960549] env[61649]: _type = "Task" [ 2956.960549] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2956.968096] env[61649]: DEBUG oslo_vmware.api [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52603015-49f6-22e7-40e8-23b20dc06592, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2957.017737] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] Unregistered the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2957.018243] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] Deleting contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2957.018559] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Deleting the datastore file [datastore1] 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2957.018914] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f6cec88b-0ad9-433a-bd67-5fa0ffcd141f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2957.024470] env[61649]: DEBUG oslo_vmware.api [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Waiting for the task: (returnval){ [ 2957.024470] env[61649]: value = "task-158369" [ 2957.024470] env[61649]: _type = "Task" [ 2957.024470] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2957.031885] env[61649]: DEBUG oslo_vmware.api [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Task: {'id': task-158369, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2957.471098] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] Preparing fetch location {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2957.471388] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Creating directory with path [datastore1] vmware_temp/60aaa6ba-644c-48fa-841d-179be4802781/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2957.471581] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4e34fd2e-793c-42bc-b150-bb1d2d6cf81b {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2957.483010] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Created directory with path [datastore1] vmware_temp/60aaa6ba-644c-48fa-841d-179be4802781/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2957.483189] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] Fetch image to [datastore1] vmware_temp/60aaa6ba-644c-48fa-841d-179be4802781/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2957.483353] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to [datastore1] vmware_temp/60aaa6ba-644c-48fa-841d-179be4802781/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2957.484093] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4be3c6c7-cb11-47db-a22b-5e23a70555e5 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2957.490778] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-343ce142-6ed4-48e8-9e60-f733bc3170b8 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2957.499571] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8042cd0f-ca06-4480-8c7d-f8affaa57039 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2957.532908] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-353c6972-0cf2-4626-b8eb-1d28b7c269b8 {{(pid=61649) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2957.539673] env[61649]: DEBUG oslo_vmware.api [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Task: {'id': task-158369, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078383} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2957.541164] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Deleted the datastore file {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2957.541365] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] Deleted contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2957.541595] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2957.541802] env[61649]: INFO nova.compute.manager [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] Took 0.60 seconds to destroy the instance on the hypervisor. 
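The traceback above (task-158367) shows the oslo.vmware task-polling pattern: Nova invokes a vSphere "*_Task" method such as VirtualDiskManager.CopyVirtualDisk_Task, and oslo_vmware.api polls the TaskInfo from a looping call until it reaches 'success' or 'error', translating an error state into a Python exception (here VimFaultException with Faults: ['InvalidArgument']). The following is a minimal self-contained sketch of that loop, not the oslo.vmware source; TaskInfo, its state sequence, and the fault text are illustrative stubs so the sketch runs without a vCenter.

    import time

    class TaskInfo:
        """Illustrative stand-in for a vSphere TaskInfo object."""
        def __init__(self, states):
            self._states = iter(states)
            self.state = None
            self.error = None

        def poll(self):
            self.state = next(self._states)
            if self.state == 'error':
                self.error = "A specified parameter was not correct: fileType"

    class VimFaultException(Exception):
        """Stand-in for oslo_vmware.exceptions.VimFaultException."""

    def wait_for_task(task_info, poll_interval=0.5):
        # Poll until the task reaches a terminal state, the way the looping
        # call around oslo_vmware.api._poll_task does; on 'error' the real
        # code raises exceptions.translate_fault(task_info.error).
        while True:
            task_info.poll()
            if task_info.state == 'success':
                return task_info
            if task_info.state == 'error':
                raise VimFaultException(task_info.error)
            time.sleep(poll_interval)

    # A copy task that reports progress once and then faults, as task-158367 does above:
    try:
        wait_for_task(TaskInfo(['running', 'error']), poll_interval=0)
    except VimFaultException as exc:
        print('Instance failed to spawn:', exc)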
[ 2957.543811] env[61649]: DEBUG nova.compute.claims [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] Aborting claim: {{(pid=61649) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 2957.543971] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2957.544200] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2957.545997] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Expecting reply to msg 108dd25598b5483eb6d72bc816ec771f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2957.546811] env[61649]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-f9bba616-0fbe-40fb-8ce6-9c0dd7fd0d60 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2957.566514] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2957.579900] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 108dd25598b5483eb6d72bc816ec771f [ 2957.616297] env[61649]: DEBUG oslo_vmware.rw_handles [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/60aaa6ba-644c-48fa-841d-179be4802781/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2957.673395] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-614658e7-d872-4208-bb57-f074187b53a2 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2957.677959] env[61649]: DEBUG oslo_vmware.rw_handles [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Completed reading data from the image iterator. 
{{(pid=61649) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2957.678179] env[61649]: DEBUG oslo_vmware.rw_handles [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/60aaa6ba-644c-48fa-841d-179be4802781/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2957.681750] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c6ee1a2-07b2-47e4-89ce-e8d264c5696f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2957.710978] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-269bfc39-67f7-4555-83ad-ef300adb5eb5 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2957.717453] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7575ba4f-03b6-4b0c-be9f-ea12f7846d0b {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2957.729846] env[61649]: DEBUG nova.compute.provider_tree [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2957.730342] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Expecting reply to msg 0a91a5bde0594aa6803eb607b001de62 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2957.737442] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0a91a5bde0594aa6803eb607b001de62 [ 2957.738274] env[61649]: DEBUG nova.scheduler.client.report [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2957.740424] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Expecting reply to msg 9d4f8f217813443dafcbda2092b6b388 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2957.752334] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9d4f8f217813443dafcbda2092b6b388 [ 2957.753044] env[61649]: DEBUG oslo_concurrency.lockutils [None 
req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.209s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2957.753549] env[61649]: ERROR nova.compute.manager [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2957.753549] env[61649]: Faults: ['InvalidArgument'] [ 2957.753549] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] Traceback (most recent call last): [ 2957.753549] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2957.753549] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] self.driver.spawn(context, instance, image_meta, [ 2957.753549] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2957.753549] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2957.753549] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2957.753549] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] self._fetch_image_if_missing(context, vi) [ 2957.753549] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2957.753549] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] image_cache(vi, tmp_image_ds_loc) [ 2957.753549] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2957.753864] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] vm_util.copy_virtual_disk( [ 2957.753864] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2957.753864] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] session._wait_for_task(vmdk_copy_task) [ 2957.753864] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2957.753864] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] return self.wait_for_task(task_ref) [ 2957.753864] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2957.753864] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] return 
evt.wait() [ 2957.753864] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2957.753864] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] result = hub.switch() [ 2957.753864] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2957.753864] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] return self.greenlet.switch() [ 2957.753864] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2957.753864] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] self.f(*self.args, **self.kw) [ 2957.754183] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2957.754183] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] raise exceptions.translate_fault(task_info.error) [ 2957.754183] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2957.754183] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] Faults: ['InvalidArgument'] [ 2957.754183] env[61649]: ERROR nova.compute.manager [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] [ 2957.754183] env[61649]: DEBUG nova.compute.utils [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] VimFaultException {{(pid=61649) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2957.755622] env[61649]: DEBUG nova.compute.manager [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] Build of instance 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef was re-scheduled: A specified parameter was not correct: fileType [ 2957.755622] env[61649]: Faults: ['InvalidArgument'] {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2957.755993] env[61649]: DEBUG nova.compute.manager [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] Unplugging VIFs for instance {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2957.756184] env[61649]: DEBUG nova.compute.manager [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. 
{{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2957.756367] env[61649]: DEBUG nova.compute.manager [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2957.756592] env[61649]: DEBUG nova.network.neutron [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}} [ 2957.956069] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Expecting reply to msg 7f2e7da29cb647e6978a02e4710f363f in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2957.964104] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7f2e7da29cb647e6978a02e4710f363f [ 2957.964646] env[61649]: DEBUG nova.network.neutron [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2957.965115] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Expecting reply to msg 4a329a706e244803821df6764d9fa6a6 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2957.976671] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4a329a706e244803821df6764d9fa6a6 [ 2957.977163] env[61649]: INFO nova.compute.manager [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] Took 0.22 seconds to deallocate network for instance. 
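Between timestamps 2956.940 and 2957.977 the log walks the failure-cleanup path: the spawn exception triggers destruction of the half-built guest, the resource claim is aborted under the compute_resources lock, the network is deallocated, and the build is handed back for rescheduling. Below is a rough sketch of that control flow, using simplified stand-in names rather than Nova's real classes and signatures.

    class RescheduledException(Exception):
        """Stand-in for the exception type that triggers a reschedule."""

    def build_and_run_instance(driver, claim, network_api, instance):
        try:
            driver.spawn(instance)
        except Exception as exc:
            driver.destroy(instance)       # "Terminating instance" ... "Instance destroyed"
            claim.abort()                  # "Aborting claim" under the compute_resources lock
            network_api.deallocate_for_instance(instance)  # "Deallocating network for instance"
            # Logged as: "Build of instance ... was re-scheduled: <fault>"
            raise RescheduledException(str(exc)) from exc

    class StubDriver:
        def spawn(self, instance):
            raise RuntimeError("A specified parameter was not correct: fileType")
        def destroy(self, instance):
            print('destroyed', instance)

    class StubClaim:
        def abort(self):
            print('claim aborted')

    class StubNetworkAPI:
        def deallocate_for_instance(self, instance):
            print('network deallocated for', instance)

    try:
        build_and_run_instance(StubDriver(), StubClaim(), StubNetworkAPI(),
                               '68dec8d8-a7c8-4c78-a04e-45d7b0cffdef')
    except RescheduledException as exc:
        print('re-scheduled:', exc)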
[ 2957.978838] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Expecting reply to msg 81f28e9e7f47460bb581925a068159b5 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2958.008261] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 81f28e9e7f47460bb581925a068159b5 [ 2958.010778] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Expecting reply to msg 7ffc13adc72a42cf87cec82979d5f9a2 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2958.038617] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7ffc13adc72a42cf87cec82979d5f9a2 [ 2958.056849] env[61649]: INFO nova.scheduler.client.report [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Deleted allocations for instance 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef [ 2958.062922] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Expecting reply to msg e22a2948857a49a899aea64ec4167da4 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2958.072434] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e22a2948857a49a899aea64ec4167da4 [ 2958.072994] env[61649]: DEBUG oslo_concurrency.lockutils [None req-9a7d74ea-c53c-40c6-a1a2-866b033f6bee tempest-ServerGroupTestJSON-1964393411 tempest-ServerGroupTestJSON-1964393411-project-member] Lock "68dec8d8-a7c8-4c78-a04e-45d7b0cffdef" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 94.773s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2958.073291] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "68dec8d8-a7c8-4c78-a04e-45d7b0cffdef" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 9.914s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2958.073537] env[61649]: INFO nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 68dec8d8-a7c8-4c78-a04e-45d7b0cffdef] During sync_power_state the instance has a pending task (spawning). Skip. 
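The "pending task (spawning). Skip." line above comes from the periodic power-state sync, which takes the per-instance lock (see the acquired/released pair around it) and bails out when task_state is set, since the hypervisor view of a mid-operation instance is transient. A small sketch of that guard follows, with an illustrative dict standing in for the real Instance object.

    def query_driver_power_state_and_sync(instance):
        # Skip any instance that is mid-operation; syncing its power state now
        # would race with the in-flight task (e.g. 'spawning').
        if instance.get('task_state') is not None:
            print('During sync_power_state the instance has a pending task '
                  '(%s). Skip.' % instance['task_state'])
            return
        # ...otherwise the DB power_state would be reconciled with the driver's view.

    query_driver_power_state_and_sync(
        {'uuid': '68dec8d8-a7c8-4c78-a04e-45d7b0cffdef', 'task_state': 'spawning'})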
[ 2958.073773] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "68dec8d8-a7c8-4c78-a04e-45d7b0cffdef" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2960.924564] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2961.928593] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2962.929341] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2962.929634] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Starting heal instance info cache {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}} [ 2962.929634] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Rebuilding the list of instances to heal {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}} [ 2962.930249] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg a18633e418684d8c90f2797a5c810d99 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2962.940231] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a18633e418684d8c90f2797a5c810d99 [ 2962.942740] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 2962.942740] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Didn't find any instances for network info cache update. 
{{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}} [ 2963.928624] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2964.929666] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2964.929952] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2964.930346] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg be3c0baae431493d83ac9165a6a651b4 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2964.938567] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg be3c0baae431493d83ac9165a6a651b4 [ 2964.939615] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2964.939830] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2964.939991] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2964.940221] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61649) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2964.941323] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb28be1f-4121-4c85-9981-d21af35c48c3 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2964.950907] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-337b6ae9-7d51-4621-af52-51d6eaff51e9 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2964.966027] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60dd4305-8205-41c5-8620-914aca024601 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2964.974207] env[61649]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ea2a716-ffda-446b-9952-f2fb968132be {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2965.010871] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181840MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61649) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2965.011039] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2965.011250] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2965.012092] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg b6eb14f281624d398856cf4c71b880b7 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2965.024217] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b6eb14f281624d398856cf4c71b880b7 [ 2965.025454] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 5146e830f60844d5a3144189868de36a in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2965.034247] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5146e830f60844d5a3144189868de36a [ 2965.052749] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 7c9cff03-a7f6-41fe-b0e2-47260ba304ad actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2965.052949] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2965.053091] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=640MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2965.088728] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee51f733-970e-44c3-bf3e-9e05039c343e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2965.095923] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f76178b-e255-4335-844c-70ed0883bba7 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2965.126117] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e39ef56-2615-4808-a83d-d373a85d20ac {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2965.139754] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f4685ea-7c34-4f7a-8139-31b1d2834a94 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2965.153140] env[61649]: DEBUG nova.compute.provider_tree [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2965.153646] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 6084f2fb1ef5444382305a2ae101880e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2965.162279] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6084f2fb1ef5444382305a2ae101880e [ 2965.163202] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2965.165402] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 3c97f70e07484f3c8c623817c74cda47 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2965.177191] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3c97f70e07484f3c8c623817c74cda47 [ 
2965.177437] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61649) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2965.177437] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.166s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2965.890689] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Acquiring lock "50b9220f-9e34-4358-a9a1-c4b4d7f392e4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2965.890921] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Lock "50b9220f-9e34-4358-a9a1-c4b4d7f392e4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2965.891380] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Expecting reply to msg 15ff3fc73890453fb4a9dbc65bf912d1 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2965.899747] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 15ff3fc73890453fb4a9dbc65bf912d1 [ 2965.900263] env[61649]: DEBUG nova.compute.manager [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] Starting instance... 
{{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2965.901809] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Expecting reply to msg ac32ce23f5574218aad91d0b05cfceee in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2965.931237] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ac32ce23f5574218aad91d0b05cfceee [ 2965.947526] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2965.947770] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2965.949250] env[61649]: INFO nova.compute.claims [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2965.950772] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Expecting reply to msg c5c41c911dc641d5a5dc91adbfa49092 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2965.981121] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c5c41c911dc641d5a5dc91adbfa49092 [ 2965.982668] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Expecting reply to msg fec7634fc9744837a4cdcbf015899bd1 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2965.991547] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fec7634fc9744837a4cdcbf015899bd1 [ 2966.029387] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-743e0cc9-6329-43bf-9e09-958598301936 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2966.037094] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d54a8f2-d38b-4ef9-8eea-53604a6562ce {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2966.066417] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a34936f-4f4e-49ca-a399-6d383848bbb3 {{(pid=61649) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2966.073483] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bcadb9a-21ee-471a-9667-83fe5937d365 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2966.086164] env[61649]: DEBUG nova.compute.provider_tree [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2966.086664] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Expecting reply to msg d3c75bf714b748c2bcd5474ad0f6edfa in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2966.094839] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d3c75bf714b748c2bcd5474ad0f6edfa [ 2966.095751] env[61649]: DEBUG nova.scheduler.client.report [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2966.098115] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Expecting reply to msg 5a44cc2f7da84de1bc6c3d8c94da77d1 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2966.108084] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5a44cc2f7da84de1bc6c3d8c94da77d1 [ 2966.108782] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.161s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2966.109242] env[61649]: DEBUG nova.compute.manager [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] Start building networks asynchronously for instance. 
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2966.110925] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Expecting reply to msg b74a7bf1dcbe460f9ebf77409469b079 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2966.139141] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b74a7bf1dcbe460f9ebf77409469b079 [ 2966.140942] env[61649]: DEBUG nova.compute.utils [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Using /dev/sd instead of None {{(pid=61649) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2966.141541] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Expecting reply to msg 3d06c903119949de8d0c155010eb37c8 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2966.142492] env[61649]: DEBUG nova.compute.manager [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] Allocating IP information in the background. {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2966.142665] env[61649]: DEBUG nova.network.neutron [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] allocate_for_instance() {{(pid=61649) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2966.149883] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3d06c903119949de8d0c155010eb37c8 [ 2966.150400] env[61649]: DEBUG nova.compute.manager [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] Start building block device mappings for instance. 
{{(pid=61649) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2966.152038] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Expecting reply to msg 7d6c2326a9de4aafbbf75454437e78c0 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2966.179058] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7d6c2326a9de4aafbbf75454437e78c0 [ 2966.181874] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Expecting reply to msg f6d5ca33d42346b38f4553afa111304e in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2966.191844] env[61649]: DEBUG nova.policy [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2d102c86a2e74f8f946c898e0c67fa34', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f4a3d9b659f64bb2a8b4bb93352516c9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61649) authorize /opt/stack/nova/nova/policy.py:203}} [ 2966.211271] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f6d5ca33d42346b38f4553afa111304e [ 2966.212371] env[61649]: DEBUG nova.compute.manager [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] Start spawning the instance on the hypervisor. 
{{(pid=61649) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2966.233569] env[61649]: DEBUG nova.virt.hardware [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-04-02T10:03:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-04-02T10:03:42Z,direct_url=,disk_format='vmdk',id=d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d9c1bd4c77004c3cb8e42232cad1896c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-04-02T10:03:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2966.233781] env[61649]: DEBUG nova.virt.hardware [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Flavor limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2966.233933] env[61649]: DEBUG nova.virt.hardware [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Image limits 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2966.234399] env[61649]: DEBUG nova.virt.hardware [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Flavor pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2966.234604] env[61649]: DEBUG nova.virt.hardware [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Image pref 0:0:0 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2966.234757] env[61649]: DEBUG nova.virt.hardware [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61649) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2966.234959] env[61649]: DEBUG nova.virt.hardware [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2966.235118] env[61649]: DEBUG nova.virt.hardware [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 
tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2966.235280] env[61649]: DEBUG nova.virt.hardware [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Got 1 possible topologies {{(pid=61649) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2966.235532] env[61649]: DEBUG nova.virt.hardware [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2966.235720] env[61649]: DEBUG nova.virt.hardware [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61649) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2966.237140] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9fb4a56-86eb-465b-a884-dbbf34e6512e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2966.247051] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c580b83a-07f7-435f-a0c6-465df957ea54 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2966.454150] env[61649]: DEBUG nova.network.neutron [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] Successfully created port: 94d6e783-731a-441f-af10-c0ac6b3b85e6 {{(pid=61649) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2967.151275] env[61649]: DEBUG nova.compute.manager [req-094e46ed-a14f-4609-a75a-7b4929e8e437 req-e6408a2d-ca0d-43e2-b42c-d13a05fe9048 service nova] [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] Received event network-vif-plugged-94d6e783-731a-441f-af10-c0ac6b3b85e6 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 2967.151550] env[61649]: DEBUG oslo_concurrency.lockutils [req-094e46ed-a14f-4609-a75a-7b4929e8e437 req-e6408a2d-ca0d-43e2-b42c-d13a05fe9048 service nova] Acquiring lock "50b9220f-9e34-4358-a9a1-c4b4d7f392e4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2967.151760] env[61649]: DEBUG oslo_concurrency.lockutils [req-094e46ed-a14f-4609-a75a-7b4929e8e437 req-e6408a2d-ca0d-43e2-b42c-d13a05fe9048 service nova] Lock "50b9220f-9e34-4358-a9a1-c4b4d7f392e4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2967.151982] env[61649]: DEBUG oslo_concurrency.lockutils [req-094e46ed-a14f-4609-a75a-7b4929e8e437 
req-e6408a2d-ca0d-43e2-b42c-d13a05fe9048 service nova] Lock "50b9220f-9e34-4358-a9a1-c4b4d7f392e4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2967.152217] env[61649]: DEBUG nova.compute.manager [req-094e46ed-a14f-4609-a75a-7b4929e8e437 req-e6408a2d-ca0d-43e2-b42c-d13a05fe9048 service nova] [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] No waiting events found dispatching network-vif-plugged-94d6e783-731a-441f-af10-c0ac6b3b85e6 {{(pid=61649) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2967.152456] env[61649]: WARNING nova.compute.manager [req-094e46ed-a14f-4609-a75a-7b4929e8e437 req-e6408a2d-ca0d-43e2-b42c-d13a05fe9048 service nova] [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] Received unexpected event network-vif-plugged-94d6e783-731a-441f-af10-c0ac6b3b85e6 for instance with vm_state building and task_state spawning. [ 2967.222547] env[61649]: DEBUG nova.network.neutron [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] Successfully updated port: 94d6e783-731a-441f-af10-c0ac6b3b85e6 {{(pid=61649) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2967.223088] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Expecting reply to msg 20899ca8fbd64210b0ec4ba53216ebf9 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2967.230634] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 20899ca8fbd64210b0ec4ba53216ebf9 [ 2967.231365] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Acquiring lock "refresh_cache-50b9220f-9e34-4358-a9a1-c4b4d7f392e4" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2967.231555] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Acquired lock "refresh_cache-50b9220f-9e34-4358-a9a1-c4b4d7f392e4" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2967.231748] env[61649]: DEBUG nova.network.neutron [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] Building network info cache for instance {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2009}} [ 2967.232172] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Expecting reply to msg 1dfeea7c3b3a4138a590ceea3fe87a62 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2967.238869] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
1dfeea7c3b3a4138a590ceea3fe87a62 [ 2967.282863] env[61649]: DEBUG nova.network.neutron [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] Instance cache missing network info. {{(pid=61649) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3322}} [ 2967.416324] env[61649]: DEBUG nova.network.neutron [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] Updating instance_info_cache with network_info: [{"id": "94d6e783-731a-441f-af10-c0ac6b3b85e6", "address": "fa:16:3e:85:1e:40", "network": {"id": "561b13a6-51d5-4656-a109-4458f91cfba2", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-713823512-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f4a3d9b659f64bb2a8b4bb93352516c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "193994c7-8e1b-4f25-a4a4-d0563845eb28", "external-id": "nsx-vlan-transportzone-607", "segmentation_id": 607, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap94d6e783-73", "ovs_interfaceid": "94d6e783-731a-441f-af10-c0ac6b3b85e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2967.416890] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Expecting reply to msg bc46409019e2435fae263cfd836f7aeb in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2967.427594] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bc46409019e2435fae263cfd836f7aeb [ 2967.428239] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Releasing lock "refresh_cache-50b9220f-9e34-4358-a9a1-c4b4d7f392e4" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2967.428560] env[61649]: DEBUG nova.compute.manager [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] Instance network_info: |[{"id": "94d6e783-731a-441f-af10-c0ac6b3b85e6", "address": "fa:16:3e:85:1e:40", "network": {"id": "561b13a6-51d5-4656-a109-4458f91cfba2", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-713823512-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", 
"version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f4a3d9b659f64bb2a8b4bb93352516c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "193994c7-8e1b-4f25-a4a4-d0563845eb28", "external-id": "nsx-vlan-transportzone-607", "segmentation_id": 607, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap94d6e783-73", "ovs_interfaceid": "94d6e783-731a-441f-af10-c0ac6b3b85e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61649) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2967.429020] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:85:1e:40', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '193994c7-8e1b-4f25-a4a4-d0563845eb28', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '94d6e783-731a-441f-af10-c0ac6b3b85e6', 'vif_model': 'vmxnet3'}] {{(pid=61649) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2967.436627] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Creating folder: Project (f4a3d9b659f64bb2a8b4bb93352516c9). Parent ref: group-v51588. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2967.437164] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-79ac7490-80b9-4a61-a696-47579e7ab712 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2967.447921] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Created folder: Project (f4a3d9b659f64bb2a8b4bb93352516c9) in parent group-v51588. [ 2967.448248] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Creating folder: Instances. Parent ref: group-v51713. {{(pid=61649) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2967.448547] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-329586e3-6cc1-42a3-8006-a55873978ab8 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2967.456999] env[61649]: INFO nova.virt.vmwareapi.vm_util [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Created folder: Instances in parent group-v51713. 
[ 2967.457281] env[61649]: DEBUG oslo.service.loopingcall [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61649) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2967.457521] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] Creating VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2967.457788] env[61649]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-91bcb580-47c7-4063-ac80-0a499ed13a4f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2967.475932] env[61649]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2967.475932] env[61649]: value = "task-158372" [ 2967.475932] env[61649]: _type = "Task" [ 2967.475932] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2967.483188] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158372, 'name': CreateVM_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2967.929604] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2967.929831] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Cleaning up deleted instances with incomplete migration {{(pid=61649) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11255}} [ 2967.930106] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg d0485bb1dace4a8f8fedc923510a6414 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2967.937690] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d0485bb1dace4a8f8fedc923510a6414 [ 2967.985336] env[61649]: DEBUG oslo_vmware.api [-] Task: {'id': task-158372, 'name': CreateVM_Task, 'duration_secs': 0.280738} completed successfully. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2967.985512] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] Created VM on the ESX host {{(pid=61649) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2967.986109] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2967.986272] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2967.986630] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2967.986914] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a7accdc-e8ec-4bae-8bf8-9731dddd9787 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2967.990945] env[61649]: DEBUG oslo_vmware.api [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Waiting for the task: (returnval){ [ 2967.990945] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52bdf1d7-271b-beb4-f25e-0272155d4b15" [ 2967.990945] env[61649]: _type = "Task" [ 2967.990945] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2967.998003] env[61649]: DEBUG oslo_vmware.api [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]52bdf1d7-271b-beb4-f25e-0272155d4b15, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2968.501249] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2968.501896] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] Processing image d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2968.502281] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2969.191713] env[61649]: DEBUG nova.compute.manager [req-63ffc0ab-114e-47d3-9bef-d63ac0d4032a req-c27d9f63-2ec9-4358-8e77-ab231773cd1a service nova] [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] Received event network-changed-94d6e783-731a-441f-af10-c0ac6b3b85e6 {{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11120}} [ 2969.192127] env[61649]: DEBUG nova.compute.manager [req-63ffc0ab-114e-47d3-9bef-d63ac0d4032a req-c27d9f63-2ec9-4358-8e77-ab231773cd1a service nova] [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] Refreshing instance network info cache due to event network-changed-94d6e783-731a-441f-af10-c0ac6b3b85e6. 
{{(pid=61649) external_instance_event /opt/stack/nova/nova/compute/manager.py:11125}} [ 2969.192497] env[61649]: DEBUG oslo_concurrency.lockutils [req-63ffc0ab-114e-47d3-9bef-d63ac0d4032a req-c27d9f63-2ec9-4358-8e77-ab231773cd1a service nova] Acquiring lock "refresh_cache-50b9220f-9e34-4358-a9a1-c4b4d7f392e4" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2969.192769] env[61649]: DEBUG oslo_concurrency.lockutils [req-63ffc0ab-114e-47d3-9bef-d63ac0d4032a req-c27d9f63-2ec9-4358-8e77-ab231773cd1a service nova] Acquired lock "refresh_cache-50b9220f-9e34-4358-a9a1-c4b4d7f392e4" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2969.193051] env[61649]: DEBUG nova.network.neutron [req-63ffc0ab-114e-47d3-9bef-d63ac0d4032a req-c27d9f63-2ec9-4358-8e77-ab231773cd1a service nova] [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] Refreshing network info cache for port 94d6e783-731a-441f-af10-c0ac6b3b85e6 {{(pid=61649) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2006}} [ 2969.193666] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-63ffc0ab-114e-47d3-9bef-d63ac0d4032a req-c27d9f63-2ec9-4358-8e77-ab231773cd1a service nova] Expecting reply to msg 97a1b0078ef5431b81562beb9a922e70 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2969.200952] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 97a1b0078ef5431b81562beb9a922e70 [ 2969.400467] env[61649]: DEBUG nova.network.neutron [req-63ffc0ab-114e-47d3-9bef-d63ac0d4032a req-c27d9f63-2ec9-4358-8e77-ab231773cd1a service nova] [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] Updated VIF entry in instance network info cache for port 94d6e783-731a-441f-af10-c0ac6b3b85e6. 
{{(pid=61649) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3481}} [ 2969.400816] env[61649]: DEBUG nova.network.neutron [req-63ffc0ab-114e-47d3-9bef-d63ac0d4032a req-c27d9f63-2ec9-4358-8e77-ab231773cd1a service nova] [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] Updating instance_info_cache with network_info: [{"id": "94d6e783-731a-441f-af10-c0ac6b3b85e6", "address": "fa:16:3e:85:1e:40", "network": {"id": "561b13a6-51d5-4656-a109-4458f91cfba2", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-713823512-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f4a3d9b659f64bb2a8b4bb93352516c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "193994c7-8e1b-4f25-a4a4-d0563845eb28", "external-id": "nsx-vlan-transportzone-607", "segmentation_id": 607, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap94d6e783-73", "ovs_interfaceid": "94d6e783-731a-441f-af10-c0ac6b3b85e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2969.401639] env[61649]: INFO oslo_messaging._drivers.amqpdriver [req-63ffc0ab-114e-47d3-9bef-d63ac0d4032a req-c27d9f63-2ec9-4358-8e77-ab231773cd1a service nova] Expecting reply to msg 82d519cda69c4bd3b8a5452d6708ba71 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2969.413090] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 82d519cda69c4bd3b8a5452d6708ba71 [ 2969.413676] env[61649]: DEBUG oslo_concurrency.lockutils [req-63ffc0ab-114e-47d3-9bef-d63ac0d4032a req-c27d9f63-2ec9-4358-8e77-ab231773cd1a service nova] Releasing lock "refresh_cache-50b9220f-9e34-4358-a9a1-c4b4d7f392e4" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2972.929064] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2972.929999] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 6a35753fe2ba4a92b286e37b2ae236c8 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2972.940389] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6a35753fe2ba4a92b286e37b2ae236c8 [ 2972.943685] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2972.943685] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 23273b1ecc3641179b80074ced6b72ce in queue 
reply_17c3d98394d943e0a538ced2a50ef815 [ 2972.949735] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 23273b1ecc3641179b80074ced6b72ce [ 2978.938980] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2978.938980] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Cleaning up deleted instances {{(pid=61649) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11217}} [ 2978.938980] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 85ecac2becf44ceab963acc04ad1526b in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 2978.947061] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 85ecac2becf44ceab963acc04ad1526b [ 2978.947586] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] There are 0 instances to clean {{(pid=61649) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11226}} [ 3005.713936] env[61649]: WARNING oslo_vmware.rw_handles [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 3005.713936] env[61649]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 3005.713936] env[61649]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 3005.713936] env[61649]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 3005.713936] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 3005.713936] env[61649]: ERROR oslo_vmware.rw_handles response.begin() [ 3005.713936] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 3005.713936] env[61649]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 3005.713936] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 3005.713936] env[61649]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 3005.713936] env[61649]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 3005.713936] env[61649]: ERROR oslo_vmware.rw_handles [ 3005.713936] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] Downloaded image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to vmware_temp/60aaa6ba-644c-48fa-841d-179be4802781/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 3005.716118] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] Caching 
image {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 3005.716407] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Copying Virtual Disk [datastore1] vmware_temp/60aaa6ba-644c-48fa-841d-179be4802781/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk to [datastore1] vmware_temp/60aaa6ba-644c-48fa-841d-179be4802781/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk {{(pid=61649) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 3005.716655] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b5cbdfcc-7de1-411c-b25d-5606ea18005a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3005.725471] env[61649]: DEBUG oslo_vmware.api [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Waiting for the task: (returnval){ [ 3005.725471] env[61649]: value = "task-158373" [ 3005.725471] env[61649]: _type = "Task" [ 3005.725471] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3005.733180] env[61649]: DEBUG oslo_vmware.api [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Task: {'id': task-158373, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3006.236075] env[61649]: DEBUG oslo_vmware.exceptions [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Fault InvalidArgument not matched. 
{{(pid=61649) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 3006.236370] env[61649]: DEBUG oslo_concurrency.lockutils [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3006.236918] env[61649]: ERROR nova.compute.manager [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 3006.236918] env[61649]: Faults: ['InvalidArgument'] [ 3006.236918] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] Traceback (most recent call last): [ 3006.236918] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 3006.236918] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] yield resources [ 3006.236918] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 3006.236918] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] self.driver.spawn(context, instance, image_meta, [ 3006.236918] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 3006.236918] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] self._vmops.spawn(context, instance, image_meta, injected_files, [ 3006.236918] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 3006.236918] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] self._fetch_image_if_missing(context, vi) [ 3006.236918] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 3006.237274] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] image_cache(vi, tmp_image_ds_loc) [ 3006.237274] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 3006.237274] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] vm_util.copy_virtual_disk( [ 3006.237274] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 3006.237274] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] session._wait_for_task(vmdk_copy_task) [ 3006.237274] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 3006.237274] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] return self.wait_for_task(task_ref) [ 3006.237274] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 3006.237274] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] return evt.wait() [ 3006.237274] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 3006.237274] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] result = hub.switch() [ 3006.237274] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 3006.237274] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] return self.greenlet.switch() [ 3006.237613] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 3006.237613] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] self.f(*self.args, **self.kw) [ 3006.237613] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 3006.237613] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] raise exceptions.translate_fault(task_info.error) [ 3006.237613] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 3006.237613] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] Faults: ['InvalidArgument'] [ 3006.237613] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] [ 3006.237613] env[61649]: INFO nova.compute.manager [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] Terminating instance [ 3006.238845] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3006.239027] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3006.239263] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6535ba0d-b45b-40ff-bf02-b1d7dafa5ef3 {{(pid=61649) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3006.241529] env[61649]: DEBUG nova.compute.manager [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 3006.241716] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 3006.242411] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-615545da-c4e0-473b-b437-767c101ad683 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3006.248533] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] Unregistering the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 3006.248735] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-30f503f1-c890-4525-bc5c-f13f3fa5f406 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3006.250688] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3006.250859] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61649) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 3006.251767] env[61649]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f443eccd-5e8a-4430-98d0-ef5649a3bbba {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3006.256233] env[61649]: DEBUG oslo_vmware.api [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Waiting for the task: (returnval){ [ 3006.256233] env[61649]: value = "session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]523fe5a1-c660-6a76-5b13-c01df3d8c1b2" [ 3006.256233] env[61649]: _type = "Task" [ 3006.256233] env[61649]: } to complete. 
{{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3006.262771] env[61649]: DEBUG oslo_vmware.api [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Task: {'id': session[525d9e61-cf32-49e5-7af6-c90bde3d60d5]523fe5a1-c660-6a76-5b13-c01df3d8c1b2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3006.316371] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] Unregistered the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 3006.316843] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] Deleting contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 3006.317164] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Deleting the datastore file [datastore1] 7c9cff03-a7f6-41fe-b0e2-47260ba304ad {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3006.317577] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9fb705b7-c037-441d-ac6e-aaacb7600c69 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3006.323202] env[61649]: DEBUG oslo_vmware.api [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Waiting for the task: (returnval){ [ 3006.323202] env[61649]: value = "task-158375" [ 3006.323202] env[61649]: _type = "Task" [ 3006.323202] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3006.330684] env[61649]: DEBUG oslo_vmware.api [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Task: {'id': task-158375, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3006.770403] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] Preparing fetch location {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 3006.770690] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Creating directory with path [datastore1] vmware_temp/fdd3aec7-0d46-4753-aa25-e3d3d6bcbf09/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3006.770935] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5784b180-78b3-4b52-84e7-19819590ccfa {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3006.781874] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Created directory with path [datastore1] vmware_temp/fdd3aec7-0d46-4753-aa25-e3d3d6bcbf09/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 {{(pid=61649) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3006.782072] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] Fetch image to [datastore1] vmware_temp/fdd3aec7-0d46-4753-aa25-e3d3d6bcbf09/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 3006.782246] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to [datastore1] vmware_temp/fdd3aec7-0d46-4753-aa25-e3d3d6bcbf09/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 3006.782975] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e585bf93-250b-44df-acd4-4f64dd521273 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3006.790678] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2909220-8f4b-454f-be51-c71b4cadf999 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3006.799096] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9639005f-5731-4e1c-ae0d-a4e8199982e3 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3006.831183] env[61649]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8383a468-a94f-46cc-b41b-9ed5be80cf2a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3006.843846] env[61649]: DEBUG oslo_vmware.api [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Task: {'id': task-158375, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.075631} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3006.844988] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Deleted the datastore file {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3006.845178] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] Deleted contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 3006.845351] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 3006.845524] env[61649]: INFO nova.compute.manager [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 3006.847229] env[61649]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-8fe893a0-562c-441f-8718-fe42aea1bd34 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 3006.849037] env[61649]: DEBUG nova.compute.claims [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] Aborting claim: {{(pid=61649) abort /opt/stack/nova/nova/compute/claims.py:84}}
[ 3006.849274] env[61649]: DEBUG oslo_concurrency.lockutils [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 3006.849490] env[61649]: DEBUG oslo_concurrency.lockutils [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 3006.851418] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg d74c40c42e94463c964ffc68d82d3581 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 3006.869621] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] Downloading image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 3006.882437] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d74c40c42e94463c964ffc68d82d3581
[ 3006.917531] env[61649]: DEBUG oslo_vmware.rw_handles [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/fdd3aec7-0d46-4753-aa25-e3d3d6bcbf09/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 3006.973304] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52c92655-5a57-438c-ad7d-169b55795722 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 3006.977734] env[61649]: DEBUG oslo_vmware.rw_handles [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Completed reading data from the image iterator. {{(pid=61649) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 3006.977913] env[61649]: DEBUG oslo_vmware.rw_handles [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/fdd3aec7-0d46-4753-aa25-e3d3d6bcbf09/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61649) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 3006.981488] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a037176c-1dd7-472d-9e0b-bb068c65104a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 3007.011113] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-576fd4c0-9458-42d6-961a-f45c18700302 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 3007.017763] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb2c4ca8-ef2f-4def-8fee-87f97084298c {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 3007.030221] env[61649]: DEBUG nova.compute.provider_tree [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 3007.030699] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 85e7caebc2e6496dae97c5bea9e28982 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 3007.038631] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 85e7caebc2e6496dae97c5bea9e28982
[ 3007.039454] env[61649]: DEBUG nova.scheduler.client.report [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 3007.041636] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg f8f39c897d0f49f6a6333c92539321d3 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 3007.051362] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f8f39c897d0f49f6a6333c92539321d3
[ 3007.052029] env[61649]: DEBUG oslo_concurrency.lockutils [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.202s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 3007.052549] env[61649]: ERROR nova.compute.manager [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 3007.052549] env[61649]: Faults: ['InvalidArgument']
[ 3007.052549] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] Traceback (most recent call last):
[ 3007.052549] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 3007.052549] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] self.driver.spawn(context, instance, image_meta,
[ 3007.052549] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 3007.052549] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 3007.052549] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 3007.052549] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] self._fetch_image_if_missing(context, vi)
[ 3007.052549] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 3007.052549] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] image_cache(vi, tmp_image_ds_loc)
[ 3007.052549] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 3007.052883] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] vm_util.copy_virtual_disk(
[ 3007.052883] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 3007.052883] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] session._wait_for_task(vmdk_copy_task)
[ 3007.052883] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 3007.052883] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] return self.wait_for_task(task_ref)
[ 3007.052883] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 3007.052883] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] return evt.wait()
[ 3007.052883] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 3007.052883] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] result = hub.switch()
[ 3007.052883] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 3007.052883] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] return self.greenlet.switch()
[ 3007.052883] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 3007.052883] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] self.f(*self.args, **self.kw)
[ 3007.053203] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 3007.053203] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] raise exceptions.translate_fault(task_info.error)
[ 3007.053203] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 3007.053203] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] Faults: ['InvalidArgument']
[ 3007.053203] env[61649]: ERROR nova.compute.manager [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad]
[ 3007.053344] env[61649]: DEBUG nova.compute.utils [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] VimFaultException {{(pid=61649) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 3007.054666] env[61649]: DEBUG nova.compute.manager [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] Build of instance 7c9cff03-a7f6-41fe-b0e2-47260ba304ad was re-scheduled: A specified parameter was not correct: fileType
[ 3007.054666] env[61649]: Faults: ['InvalidArgument'] {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 3007.055040] env[61649]: DEBUG nova.compute.manager [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] Unplugging VIFs for instance {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 3007.055210] env[61649]: DEBUG nova.compute.manager [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}}
[ 3007.055381] env[61649]: DEBUG nova.compute.manager [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 3007.055543] env[61649]: DEBUG nova.network.neutron [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}}
[ 3007.309577] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 5dbf57fec82e42659206437a4b2ee397 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 3007.323143] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5dbf57fec82e42659206437a4b2ee397
[ 3007.323715] env[61649]: DEBUG nova.network.neutron [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 3007.324381] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 03014605e68341cc845515debf24b844 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 3007.333578] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 03014605e68341cc845515debf24b844
[ 3007.333880] env[61649]: INFO nova.compute.manager [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] Took 0.28 seconds to deallocate network for instance.
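NOTE: the cleanup sequence above (re-schedule, "Unplugging VIFs", the unplug_vifs fallback message, "Deallocating network", then caching an empty network_info) is the failed-build teardown path. A rough sketch of that control flow, assuming duck-typed driver/network_api objects passed in; illustrative only, not Nova's actual code:

    def cleanup_allocated_networks(driver, network_api, instance):
        """Tear down networking after a failed build (illustrative sketch)."""
        try:
            # Drivers that cannot report VIF state raise NotImplementedError.
            driver.unplug_vifs(instance, network_info=None)
        except NotImplementedError:
            print("Virt driver does not provide unplug_vifs method, so it is "
                  "not possible determine if VIFs should be unplugged.")
        # Release the instance's ports in Neutron, then record the now-empty
        # network info (the "Updating instance_info_cache with
        # network_info: []" entry above).
        network_api.deallocate_for_instance(instance)
        instance['network_info'] = []
        return instance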
[ 3007.335526] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 9ff9ea35704a4a0bb53d04bde1c6a815 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 3007.368252] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9ff9ea35704a4a0bb53d04bde1c6a815
[ 3007.370869] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 07e15ec4fcf24b50846584d0f98f15d1 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 3007.399644] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 07e15ec4fcf24b50846584d0f98f15d1
[ 3007.418429] env[61649]: INFO nova.scheduler.client.report [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Deleted allocations for instance 7c9cff03-a7f6-41fe-b0e2-47260ba304ad
[ 3007.425464] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Expecting reply to msg 07afb9e86b034cbf8e6e11e89b9d2668 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 3007.436082] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 07afb9e86b034cbf8e6e11e89b9d2668
[ 3007.436608] env[61649]: DEBUG oslo_concurrency.lockutils [None req-db5054b2-87d2-4390-8979-b5487b7a60c9 tempest-ServersTestJSON-1529792186 tempest-ServersTestJSON-1529792186-project-member] Lock "7c9cff03-a7f6-41fe-b0e2-47260ba304ad" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 98.788s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 3007.436851] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "7c9cff03-a7f6-41fe-b0e2-47260ba304ad" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 59.277s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 3007.437010] env[61649]: INFO nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 7c9cff03-a7f6-41fe-b0e2-47260ba304ad] During sync_power_state the instance has a pending task (spawning). Skip.
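NOTE: the lockutils entries above pair every acquire with "waited Ns" and every release with "held Ns" — here the build lock for 7c9cff03 was held 98.788s while the power-state sync waited 59.277s on it. A minimal context manager reproducing that instrumentation in plain Python; illustrative only, oslo.concurrency's real implementation adds decorators, external file locks, and more:

    import threading
    import time
    from contextlib import contextmanager

    @contextmanager
    def timed_lock(lock, name, by):
        """Log waited/held durations around a lock, lockutils-style."""
        t0 = time.monotonic()
        lock.acquire()
        print(f'Lock "{name}" acquired by "{by}" :: '
              f'waited {time.monotonic() - t0:.3f}s')
        t1 = time.monotonic()
        try:
            yield
        finally:
            lock.release()
            print(f'Lock "{name}" "released" by "{by}" :: '
                  f'held {time.monotonic() - t1:.3f}s')

    # usage:
    # with timed_lock(threading.Lock(), "compute_resources", "audit"):
    #     ...critical section...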
[ 3007.437188] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "7c9cff03-a7f6-41fe-b0e2-47260ba304ad" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 3011.939811] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 3012.929601] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 3012.929601] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 3012.929601] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61649) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}}
[ 3020.924077] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 3022.929453] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 3022.929914] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Starting heal instance info cache {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}}
[ 3022.929914] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Rebuilding the list of instances to heal {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}}
[ 3022.930416] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 8dbcdb25543c4b0dacccf856e7e2374f in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 3022.939975] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8dbcdb25543c4b0dacccf856e7e2374f
[ 3022.940897] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] Skipping network cache update for instance because it is Building. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}}
[ 3022.941046] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Didn't find any instances for network info cache update. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}}
[ 3023.929502] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 3024.930032] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 3026.929567] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 3026.929942] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 3026.930182] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg ad30bde6fb424a4aaad35e0ee9505b8a in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 3026.938831] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ad30bde6fb424a4aaad35e0ee9505b8a
[ 3026.939759] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 3026.939965] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 3026.940174] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 3026.940337] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61649) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 3026.941413] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f76772d9-5a4b-4d0a-a0f1-109454974412 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 3026.949963] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ab8be3b-68e8-4e11-81d4-75b32f4850ee {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 3026.964065] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fdac099-3bb9-4a9a-b6ee-52059e81d87a {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 3026.969757] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd3a65fa-613d-47fd-bd4e-628520f34d4d {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 3026.997518] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181822MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61649) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 3026.997670] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 3026.997852] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 3026.998644] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 5ec83e03c9a443e5a8dd5957b659bfcc in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 3027.010579] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ec83e03c9a443e5a8dd5957b659bfcc
[ 3027.011681] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg d791c813c9e94ca6ad9aeb108d476c50 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 3027.019471] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d791c813c9e94ca6ad9aeb108d476c50
[ 3027.122185] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Instance 50b9220f-9e34-4358-a9a1-c4b4d7f392e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61649) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 3027.122402] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 3027.122549] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=640MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 3027.137277] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Refreshing inventories for resource provider dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}}
[ 3027.148765] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Updating ProviderTree inventory for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}}
[ 3027.148936] env[61649]: DEBUG nova.compute.provider_tree [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Updating inventory in ProviderTree for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}}
[ 3027.158809] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Refreshing aggregate associations for resource provider dad32f24-3843-462d-a3f9-4ef2a60037c4, aggregates: None {{(pid=61649) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}}
[ 3027.173402] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Refreshing trait associations for resource provider dad32f24-3843-462d-a3f9-4ef2a60037c4, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=61649) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}}
[ 3027.196173] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7c16b42-5343-41c1-9df8-7cf291f1bc58 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 3027.203339] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a187c7b7-f51c-4508-b2b8-815435f856c1 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 3027.231866] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ad8d346-eb60-4fef-adb6-a4ebe9e53672 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 3027.238311] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de6e1a88-461c-402c-b6b7-223e66df8b11 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 3027.252063] env[61649]: DEBUG nova.compute.provider_tree [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 3027.252531] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 590d71672b214933b3f5aab33eddaf23 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 3027.259472] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 590d71672b214933b3f5aab33eddaf23
[ 3027.260344] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 3027.262489] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 6c2b43069c6e4ed3a99bf4a8c66a32eb in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 3027.272423] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6c2b43069c6e4ed3a99bf4a8c66a32eb
[ 3027.273007] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61649) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 3027.273197] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.275s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 3051.003646] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 9d3657cfbd7847efa19eb031c040e682 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 3051.013239] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9d3657cfbd7847efa19eb031c040e682
[ 3056.453474] env[61649]: WARNING oslo_vmware.rw_handles [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 3056.453474] env[61649]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 3056.453474] env[61649]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 3056.453474] env[61649]: ERROR oslo_vmware.rw_handles self._conn.getresponse()
[ 3056.453474] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 3056.453474] env[61649]: ERROR oslo_vmware.rw_handles response.begin()
[ 3056.453474] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 3056.453474] env[61649]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status()
[ 3056.453474] env[61649]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 3056.453474] env[61649]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without"
[ 3056.453474] env[61649]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 3056.453474] env[61649]: ERROR oslo_vmware.rw_handles
[ 3056.454092] env[61649]: DEBUG nova.virt.vmwareapi.images [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] Downloaded image file data d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11 to vmware_temp/fdd3aec7-0d46-4753-aa25-e3d3d6bcbf09/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk on the data store datastore1 {{(pid=61649) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 3056.455969] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] Caching image {{(pid=61649) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 3056.456253] env[61649]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Copying Virtual Disk [datastore1] vmware_temp/fdd3aec7-0d46-4753-aa25-e3d3d6bcbf09/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/tmp-sparse.vmdk to [datastore1] vmware_temp/fdd3aec7-0d46-4753-aa25-e3d3d6bcbf09/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk {{(pid=61649) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 3056.456538] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-909369aa-84c8-4f21-9b0f-06adc38efa38 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 3056.463898] env[61649]: DEBUG oslo_vmware.api [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Waiting for the task: (returnval){
[ 3056.463898] env[61649]: value = "task-158376"
[ 3056.463898] env[61649]: _type = "Task"
[ 3056.463898] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 3056.471350] env[61649]: DEBUG oslo_vmware.api [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Task: {'id': task-158376, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 3056.974278] env[61649]: DEBUG oslo_vmware.exceptions [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Fault InvalidArgument not matched. {{(pid=61649) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 3056.974616] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11/d1cd53dd-702d-47cc-aaaf-dcf09a8c9d11.vmdk" {{(pid=61649) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 3056.975191] env[61649]: ERROR nova.compute.manager [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 3056.975191] env[61649]: Faults: ['InvalidArgument']
[ 3056.975191] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] Traceback (most recent call last):
[ 3056.975191] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources
[ 3056.975191] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] yield resources
[ 3056.975191] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 3056.975191] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] self.driver.spawn(context, instance, image_meta,
[ 3056.975191] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 3056.975191] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 3056.975191] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 3056.975191] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] self._fetch_image_if_missing(context, vi)
[ 3056.975191] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 3056.975904] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] image_cache(vi, tmp_image_ds_loc)
[ 3056.975904] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 3056.975904] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] vm_util.copy_virtual_disk(
[ 3056.975904] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 3056.975904] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] session._wait_for_task(vmdk_copy_task)
[ 3056.975904] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 3056.975904] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] return self.wait_for_task(task_ref)
[ 3056.975904] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 3056.975904] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] return evt.wait()
[ 3056.975904] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 3056.975904] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] result = hub.switch()
[ 3056.975904] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 3056.975904] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] return self.greenlet.switch()
[ 3056.976751] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 3056.976751] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] self.f(*self.args, **self.kw)
[ 3056.976751] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 3056.976751] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] raise exceptions.translate_fault(task_info.error)
[ 3056.976751] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 3056.976751] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] Faults: ['InvalidArgument']
[ 3056.976751] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4]
[ 3056.976751] env[61649]: INFO nova.compute.manager [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] Terminating instance
[ 3056.978314] env[61649]: DEBUG nova.compute.manager [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] Start destroying the instance on the hypervisor. {{(pid=61649) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 3056.978544] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] Destroying instance {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 3056.979279] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbd25af0-7646-42ac-b6e8-54bc73426197 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 3056.985827] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] Unregistering the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 3056.986061] env[61649]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-00a37e16-6814-494f-b09d-0e5ff4bef235 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 3057.046632] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] Unregistered the VM {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 3057.046853] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] Deleting contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 3057.047029] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Deleting the datastore file [datastore1] 50b9220f-9e34-4358-a9a1-c4b4d7f392e4 {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 3057.047299] env[61649]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-72acb175-c564-4600-a9ea-f1fe4a0770fb {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 3057.053466] env[61649]: DEBUG oslo_vmware.api [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Waiting for the task: (returnval){
[ 3057.053466] env[61649]: value = "task-158378"
[ 3057.053466] env[61649]: _type = "Task"
[ 3057.053466] env[61649]: } to complete. {{(pid=61649) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 3057.060829] env[61649]: DEBUG oslo_vmware.api [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Task: {'id': task-158378, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 3057.562935] env[61649]: DEBUG oslo_vmware.api [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Task: {'id': task-158378, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.063799} completed successfully. {{(pid=61649) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 3057.563486] env[61649]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Deleted the datastore file {{(pid=61649) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 3057.563680] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] Deleted contents of the VM from datastore datastore1 {{(pid=61649) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}}
[ 3057.563851] env[61649]: DEBUG nova.virt.vmwareapi.vmops [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] Instance destroyed {{(pid=61649) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 3057.564041] env[61649]: INFO nova.compute.manager [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] Took 0.59 seconds to destroy the instance on the hypervisor.
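NOTE: the resource-tracker audit above reports the raw hypervisor view (48 vCPUs, 196590MB RAM, 400GB disk) and placement then decides the inventory "has not changed" by comparing the reported dict against its stored one. Schedulable capacity per resource class is (total - reserved) * allocation_ratio; a small runnable sketch using the exact inventory dict from the log:

    def placement_capacity(inv):
        """Capacity placement can allocate: (total - reserved) * ratio."""
        return {rc: int((v['total'] - v['reserved']) * v['allocation_ratio'])
                for rc, v in inv.items()}

    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                 'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1,
                      'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1,
                    'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0},
    }
    # -> {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}; the "Inventory
    # has not changed" decision is then just an equality check between the
    # stored and freshly reported dicts.
    print(placement_capacity(inventory))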
[ 3057.566229] env[61649]: DEBUG nova.compute.claims [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] Aborting claim: {{(pid=61649) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 3057.566405] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 3057.566612] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 3057.568429] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Expecting reply to msg b3ff59c08849408a96ed5b486a123de4 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 3057.600178] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b3ff59c08849408a96ed5b486a123de4 [ 3057.627695] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c959bf17-a58c-4159-a967-2f55e0c2baa7 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3057.634761] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42841064-c41b-4f0f-8080-0b39503815b0 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3057.664981] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbca34a3-3f3f-485a-ae80-78174b9baa8f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3057.672113] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fc72a4a-52e8-4130-92a9-ae379b8c893e {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3057.684772] env[61649]: DEBUG nova.compute.provider_tree [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 3057.685249] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Expecting reply to msg 0fb385fc4d314a139284ea1338a9207c in queue 
reply_17c3d98394d943e0a538ced2a50ef815 [ 3057.692545] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0fb385fc4d314a139284ea1338a9207c [ 3057.693397] env[61649]: DEBUG nova.scheduler.client.report [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 3057.695469] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Expecting reply to msg 2f1dd82a281a4c439d8a49f83b63d543 in queue reply_17c3d98394d943e0a538ced2a50ef815 [ 3057.705171] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2f1dd82a281a4c439d8a49f83b63d543 [ 3057.705817] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.139s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 3057.706326] env[61649]: ERROR nova.compute.manager [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 3057.706326] env[61649]: Faults: ['InvalidArgument'] [ 3057.706326] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] Traceback (most recent call last): [ 3057.706326] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 3057.706326] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] self.driver.spawn(context, instance, image_meta, [ 3057.706326] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 3057.706326] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 3057.706326] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 3057.706326] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] self._fetch_image_if_missing(context, vi) [ 3057.706326] env[61649]: ERROR nova.compute.manager [instance: 
50b9220f-9e34-4358-a9a1-c4b4d7f392e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 3057.706326] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] image_cache(vi, tmp_image_ds_loc) [ 3057.706326] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 3057.706652] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] vm_util.copy_virtual_disk( [ 3057.706652] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 3057.706652] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] session._wait_for_task(vmdk_copy_task) [ 3057.706652] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 3057.706652] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] return self.wait_for_task(task_ref) [ 3057.706652] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 3057.706652] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] return evt.wait() [ 3057.706652] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 3057.706652] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] result = hub.switch() [ 3057.706652] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 3057.706652] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] return self.greenlet.switch() [ 3057.706652] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 3057.706652] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] self.f(*self.args, **self.kw) [ 3057.707001] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 3057.707001] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] raise exceptions.translate_fault(task_info.error) [ 3057.707001] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 3057.707001] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] Faults: ['InvalidArgument'] [ 3057.707001] env[61649]: ERROR nova.compute.manager [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] [ 3057.707001] env[61649]: DEBUG nova.compute.utils [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 
[ 3057.707001] env[61649]: DEBUG nova.compute.utils [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] VimFaultException {{(pid=61649) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 3057.708362] env[61649]: DEBUG nova.compute.manager [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] Build of instance 50b9220f-9e34-4358-a9a1-c4b4d7f392e4 was re-scheduled: A specified parameter was not correct: fileType
[ 3057.708362] env[61649]: Faults: ['InvalidArgument'] {{(pid=61649) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 3057.708777] env[61649]: DEBUG nova.compute.manager [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] Unplugging VIFs for instance {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 3057.708951] env[61649]: DEBUG nova.compute.manager [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61649) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}}
[ 3057.709120] env[61649]: DEBUG nova.compute.manager [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] Deallocating network for instance {{(pid=61649) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 3057.709284] env[61649]: DEBUG nova.network.neutron [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] deallocate_for_instance() {{(pid=61649) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1802}}
[ 3057.964454] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Expecting reply to msg bd0bdb8be8d94b0d8649fce2c739f41a in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 3057.972664] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bd0bdb8be8d94b0d8649fce2c739f41a
[ 3057.973153] env[61649]: DEBUG nova.network.neutron [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] Updating instance_info_cache with network_info: [] {{(pid=61649) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 3057.973606] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Expecting reply to msg eea3fada5e2f40388571c50348121da5 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 3057.985156] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eea3fada5e2f40388571c50348121da5
[ 3057.985758] env[61649]: INFO nova.compute.manager [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] [instance: 50b9220f-9e34-4358-a9a1-c4b4d7f392e4] Took 0.28 seconds to deallocate network for instance.
[ 3057.987392] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Expecting reply to msg 74a870c0083343ccabb31b6fd2d50943 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 3058.018534] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 74a870c0083343ccabb31b6fd2d50943
[ 3058.021137] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Expecting reply to msg 1e8e1ca035ca4de4ae00fe84ab947acb in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 3058.050202] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1e8e1ca035ca4de4ae00fe84ab947acb
[ 3058.067563] env[61649]: INFO nova.scheduler.client.report [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Deleted allocations for instance 50b9220f-9e34-4358-a9a1-c4b4d7f392e4
[ 3058.073303] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Expecting reply to msg 2259590338c84ea0961e3b26cf117417 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 3058.082027] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2259590338c84ea0961e3b26cf117417
[ 3058.082507] env[61649]: DEBUG oslo_concurrency.lockutils [None req-4a5d8ac0-3dd1-4679-8bbb-d34452618d2d tempest-ServersNegativeTestMultiTenantJSON-636493145 tempest-ServersNegativeTestMultiTenantJSON-636493145-project-member] Lock "50b9220f-9e34-4358-a9a1-c4b4d7f392e4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 92.192s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
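The 92.192s hold time on the instance-UUID lock shows that the entire failed build, including the image-cache attempt and the cleanup above, ran under one per-instance lock; the acquire/release bookkeeping is oslo.concurrency's standard logging. A sketch of the same inner-function locking pattern with oslo_concurrency.lockutils, where _build is a hypothetical stand-in for the driver.spawn() chain seen in the traceback:

    from oslo_concurrency import lockutils

    def do_build_and_run_instance(instance_uuid):
        # The lock name is the instance UUID, so the decorator is applied to
        # an inner function; lockutils then emits the 'Acquiring lock ...' /
        # '"released" ... held N.NNNs' lines seen in this log.
        @lockutils.synchronized(instance_uuid)
        def _locked_do_build_and_run_instance():
            _build(instance_uuid)

        _locked_do_build_and_run_instance()

    def _build(instance_uuid):
        # Hypothetical placeholder for the actual spawn work.
        pass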
[ 3074.274856] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 3074.929286] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 3074.929553] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 3074.929678] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61649) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10536}}
[ 3080.924440] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 3082.928926] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 3082.929313] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Starting heal instance info cache {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9917}}
[ 3082.929313] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Rebuilding the list of instances to heal {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9921}}
[ 3082.929823] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 730b4b5cc77a491da6ccca2e1fae3565 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 3082.937761] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 730b4b5cc77a491da6ccca2e1fae3565
[ 3082.938369] env[61649]: DEBUG nova.compute.manager [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Didn't find any instances for network info cache update. {{(pid=61649) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10003}}
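The "Running periodic task ComputeManager._*" lines come from oslo.service's periodic-task runner, which collects decorated methods from a manager class and invokes each on its configured spacing, logging the DEBUG line just before each call. A minimal sketch with oslo_service.periodic_task; the manager class, task name, and 60-second spacing here are illustrative, not Nova's actual values:

    from oslo_config import cfg
    from oslo_service import periodic_task

    CONF = cfg.CONF

    class Manager(periodic_task.PeriodicTasks):
        def __init__(self):
            super().__init__(CONF)

        @periodic_task.periodic_task(spacing=60)
        def _heal_cache(self, context):
            # Invoked roughly every 60s; the runner logs
            # "Running periodic task Manager._heal_cache" before each call.
            pass

    manager = Manager()
    manager.run_periodic_tasks(context=None)  # normally driven by a timer loop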
[ 3084.929750] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 3085.929224] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 3086.929605] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 3086.930042] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 5ab7a6b676734fd58cfb4811ad70b40b in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 3086.939451] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ab7a6b676734fd58cfb4811ad70b40b
[ 3086.940457] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 3086.940678] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 3086.940851] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 3086.941002] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61649) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 3086.942081] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cef4574-6535-416e-83ca-54e4a82c0c93 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 3086.950783] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eb8edc7-879e-471f-a748-4d8e119571d4 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 3086.964971] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4018696-9ed1-4cbe-bcb2-38aca9434d5f {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 3086.971571] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfee32dd-25a7-4ffe-8dd1-6a5e644dc558 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
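Each "Expecting reply to msg <id> in queue reply_<...>" / "Received RPC response for msg <id>" pair throughout this log is oslo.messaging's AMQP driver correlating an RPC call with its response: every caller process listens on one shared reply queue and matches incoming replies to waiters by message id. A stripped-down, in-process sketch of that correlation logic; there is no real AMQP here, and ReplyWaiter plus the transport.publish interface are assumptions for illustration:

    import queue
    import threading
    import uuid

    class ReplyWaiter:
        """Match responses on one shared reply queue back to their callers."""

        def __init__(self):
            self.reply_queue = "reply_%s" % uuid.uuid4().hex
            self._waiters = {}          # msg_id -> Queue of responses
            self._lock = threading.Lock()

        def send(self, transport, payload):
            # "Expecting reply to msg <id> in queue reply_<...>"
            msg_id = uuid.uuid4().hex
            with self._lock:
                self._waiters[msg_id] = queue.Queue()
            transport.publish({"msg_id": msg_id,
                               "reply_q": self.reply_queue,
                               "payload": payload})
            return msg_id

        def wait(self, msg_id, timeout=60.0):
            # "Received RPC response for msg <id>"
            response = self._waiters[msg_id].get(timeout=timeout)
            with self._lock:
                del self._waiters[msg_id]
            return response

        def on_reply(self, message):
            # Called by the consumer thread that reads the reply queue.
            with self._lock:
                waiter = self._waiters.get(message["msg_id"])
            if waiter is not None:
                waiter.put(message["payload"])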
[ 3087.000332] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181825MB free_disk=197GB free_vcpus=48 pci_devices=None {{(pid=61649) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 3087.000468] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 3087.000647] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 3087.001423] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 2431e4fc8e7f47ea9a2954e06241ad9e in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 3087.009635] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2431e4fc8e7f47ea9a2954e06241ad9e
[ 3087.010203] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg b379622113214a8191492ad91aa0bb30 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 3087.018061] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b379622113214a8191492ad91aa0bb30
[ 3087.031863] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 3087.032039] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61649) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 3087.045587] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-815fa877-86d8-4422-bf25-9eacd9c6e109 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 3087.052859] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c3b7faa-efcb-49b3-96a1-7b4f7b20fda0 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 3087.083132] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c99a4f9c-cb4e-4f30-af39-6fe1e832c971 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 3087.089837] env[61649]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e02a25d-58f5-409d-9fd4-b2e58b1e1339 {{(pid=61649) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
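The final resource view above feeds the placement inventory that this node keeps reporting as unchanged. For each resource class, the schedulable capacity follows placement's formula: (total - reserved) * allocation_ratio, with max_unit separately capping any single allocation (16 for VCPU here). A plain-Python check of the numbers, not placement code:

    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print("%s: %g" % (rc, capacity))
    # VCPU: 192        -> 48 physical cores oversubscribed 4x
    # MEMORY_MB: 196078
    # DISK_GB: 400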
[ 3087.102301] env[61649]: DEBUG nova.compute.provider_tree [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed in ProviderTree for provider: dad32f24-3843-462d-a3f9-4ef2a60037c4 {{(pid=61649) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 3087.102737] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg 06b6baa4297f47ce92d63cfca3dc25a7 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 3087.109354] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 06b6baa4297f47ce92d63cfca3dc25a7
[ 3087.110162] env[61649]: DEBUG nova.scheduler.client.report [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Inventory has not changed for provider dad32f24-3843-462d-a3f9-4ef2a60037c4 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 197, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61649) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 3087.112191] env[61649]: INFO oslo_messaging._drivers.amqpdriver [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Expecting reply to msg dc240bc6739d4b6e8e7505fb6aa92844 in queue reply_17c3d98394d943e0a538ced2a50ef815
[ 3087.121855] env[61649]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dc240bc6739d4b6e8e7505fb6aa92844
[ 3087.122448] env[61649]: DEBUG nova.compute.resource_tracker [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61649) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 3087.122627] env[61649]: DEBUG oslo_concurrency.lockutils [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.122s {{(pid=61649) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 3089.123201] env[61649]: DEBUG oslo_service.periodic_task [None req-146045c5-83e9-4ca8-997c-f9fc3ede4b55 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61649) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}